From 0c243efe15926e97ccf4ff4e8fc28a5eafa1d41e Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 13:28:46 +0530 Subject: [PATCH 01/37] add XFStests as native LISA tool --- lisa/tools/__init__.py | 2 + lisa/tools/xfstests.py | 839 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 841 insertions(+) create mode 100644 lisa/tools/xfstests.py diff --git a/lisa/tools/__init__.py b/lisa/tools/__init__.py index da08f164f3..cc3be0ad60 100644 --- a/lisa/tools/__init__.py +++ b/lisa/tools/__init__.py @@ -129,6 +129,7 @@ from .whoami import Whoami from .windows_feature import WindowsFeatureManagement from .wsl import Wsl +from .xfstests import Xfstests __all__ = [ "AptAddRepository", @@ -269,4 +270,5 @@ "Whoami", "WindowsFeatureManagement", "Wsl", + "Xfstests", ] diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py new file mode 100644 index 0000000000..fc44689f62 --- /dev/null +++ b/lisa/tools/xfstests.py @@ -0,0 +1,839 @@ +# Copyright (c) Microsoft Corporation. +# Licensed under the MIT license. +import re +from dataclasses import dataclass +from pathlib import Path, PurePath +from typing import Any, Dict, List, Optional, Type, cast + +from assertpy import assert_that + +from lisa.executable import Tool +from lisa.messages import TestStatus, send_sub_test_result_message +from lisa.operating_system import ( + CBLMariner, + Debian, + Oracle, + Posix, + Redhat, + Suse, + Ubuntu, +) +from lisa.testsuite import TestResult +from lisa.tools import Cat, Chmod, Diff, Echo, Git, Ls, Make, Pgrep, Rm, Sed +from lisa.util import ( + PassedException, + LisaException, + UnsupportedDistroException, + find_patterns_in_lines, + SkippedException, +) + + +@dataclass +class XfstestsResult: + name: str = "" + status: TestStatus = TestStatus.QUEUED + message: str = "" + + +class Xfstests(Tool): + repo = "https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" + branch = "master" + common_dep = [ + "acl", + "attr", + "automake", + "bc", + "cifs-utils", + "dos2unix", + "dump", + "e2fsprogs", + "e2fsprogs-devel", + "gawk", + "gcc", + "libtool", + "lvm2", + "make", + "parted", + "quota", + "quota-devel", + "sed", + "xfsdump", + "xfsprogs", + "indent", + "python", + "fio", + "dbench", + ] + debian_dep = [ + "libacl1-dev", + "libaio-dev", + "libattr1-dev", + "libgdbm-dev", + "libtool-bin", + "libuuid1", + "libuuidm-ocaml-dev", + "sqlite3", + "uuid-dev", + "uuid-runtime", + "xfslibs-dev", + "zlib1g-dev", + "btrfs-tools", + "btrfs-progs", + "libgdbm-compat-dev", + "liburing-dev", + "liburing2", + ] + fedora_dep = [ + "libtool", + "libuuid-devel", + "libacl-devel", + "xfsprogs-devel", + "epel-release", + "libaio-devel", + "libattr-devel", + "sqlite", + "xfsprogs-qa-devel", + "zlib-devel", + "btrfs-progs-devel", + "llvm-ocaml-devel", + "uuid-devel", + "libtool", + "e2fsprogs-devel", + "gdbm-devel", + ] + suse_dep = [ + "btrfsprogs", + "libacl-devel", + "libaio-devel", + "libattr-devel", + "sqlite", + "xfsprogs-devel", + "lib-devel", + ] + mariner_dep = [ + "python-iniparse", + "libacl-devel", + "libaio-devel", + "libattr-devel", + "sqlite", + "xfsprogs-devel", + "zlib-devel", + "trfs-progs-devel", + "diffutils", + "btrfs-progs", + "btrfs-progs-devel", + "gcc", + "autoconf", + "binutils", + "kernel-headers", + "util-linux-devel", + "psmisc", + "perl-CPAN", + ] + # Passed all 35 tests + __all_pass_pattern = re.compile( + r"([\w\W]*?)Passed all (?P\d+) tests", re.MULTILINE + ) + # Failed 22 of 514 tests + __fail_pattern = re.compile( + r"([\w\W]*?)Failed (?P\d+) of (?P\d+) tests", + 
re.MULTILINE, + ) + # Failures: generic/079 generic/193 generic/230 generic/256 generic/314 generic/317 generic/318 generic/355 generic/382 generic/523 generic/536 generic/553 generic/554 generic/565 generic/566 generic/587 generic/594 generic/597 generic/598 generic/600 generic/603 generic/646 # noqa: E501 + __fail_cases_pattern = re.compile( + r"([\w\W]*?)Failures: (?P.*)", + re.MULTILINE, + ) + # Ran: generic/001 generic/002 generic/003 ... + __all_cases_pattern = re.compile( + r"([\w\W]*?)Ran: (?P.*)", + re.MULTILINE, + ) + # Not run: generic/110 generic/111 generic/115 ... + __not_run_cases_pattern = re.compile( + r"([\w\W]*?)Not run: (?P.*)", + re.MULTILINE, + ) + + @property + def command(self) -> str: + # The command is not used + # _check_exists is overwritten to check tool existence + return str(self.get_tool_path(use_global=True) / "xfstests-dev" / "check") + + @property + def can_install(self) -> bool: + return True + + @property + def dependencies(self) -> List[Type[Tool]]: + return [Git, Make] + + def run_test( + self, + # test_type: str, + log_path: Path, + result: TestResult, + test_section: str = "", + data_disk: str = "", + test_cases: str = "", + timeout: int = 14400, + ) -> None: + '''About: This method runs XFSTest on a given node with the specified + test group and test cases.If test_section is not specified , test is + run with "generic/quick" classification and XFS environment variables. + If test_section is specified, test is run with the specified test group + and XFS environment variables from local.config.If test_cases is specified, + only the specified test cases are run.If empty, all test cases barring + exclude.txt entries are run.Runtime is set to 4 hours by default, + but can be overridden by the user.This method after running xfstest + will parse the output and sends subtest results to the test result object. + + Parameters: + log_path: The path where the xfstests logs will be saved + result: The LISA test result object to which the subtest results will be sent + test_section: The test group name to be used for testing. + Defaults to "generic/quick" + note: if specified, test_section must exist in local.config + data_disk: The data disk used for testing + test_cases: The test cases to be run. If empty, all test cases barring + exclude.txt entries are run + timeout: The time in seconds after which the test will be timed out. + Defaults to 4 hours + + + usage example: + + xfstest.run_test( + log_path=Path("/tmp/xfstests"), + result=test_result, + test_section="generic/quick", + data_disk="/dev/sdb", + test_cases="generic/001 generic/002", + timeout=14400, + ) + ''' + # if Test group is specified, and exists in local.config, run tests. + if test_section: + self.run_async( + f"-s {test_section} -E exclude.txt {test_cases} > xfstest.log 2>&1", + sudo=True, + shell=True, + force_run=True, + cwd=self.get_xfstests_path(), + ) + # Else run generic quick test + else: + self.run_async( + f"-g generic/quick -E exclude.txt {test_cases} > xfstest.log 2>&1", + sudo=True, + shell=True, + force_run=True, + cwd=self.get_xfstests_path(), + ) + + pgrep = self.node.tools[Pgrep] + # this is the actual process name, when xfstests runs. 
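+        # ("check" is the xfstests harness script at the root of xfstests-dev,
+        # the same entry point returned by the `command` property above, so
+        # pgrep waits on that process name to detect run completion.)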
+ # monitor till process completes or timesout + try: + pgrep.wait_processes("check", timeout=timeout) + finally: + self.check_test_results( + log_path=log_path, + test_section=test_section if test_section else "generic", + result=result, + data_disk=data_disk, + ) + + def _initialize(self, *args: Any, **kwargs: Any) -> None: + super()._initialize(*args, **kwargs) + self._code_path = self.get_tool_path(use_global=True) / "xfstests-dev" + + def _install_dep(self) -> None: + ''' + This method will install dependencies based on OS. + Dependencies are fetched from ''' + posix_os: Posix = cast(Posix, self.node.os) + # install dependency packages + package_list = [] + package_list.extend(self.common_dep) + if isinstance(self.node.os, Redhat): + package_list.extend(self.fedora_dep) + elif isinstance(self.node.os, Debian): + if ( + isinstance(self.node.os, Ubuntu) + and self.node.os.information.version < "18.4.0" + ): + raise UnsupportedDistroException(self.node.os) + package_list.extend(self.debian_dep) + elif isinstance(self.node.os, Suse): + package_list.extend(self.suse_dep) + elif isinstance(self.node.os, CBLMariner): + package_list.extend(self.mariner_dep) + else: + raise LisaException( + f"Current distro {self.node.os.name} doesn't support xfstests." + ) + + # if install the packages in one command, the remain available packages can't + # be installed if one of packages is not available in that distro, + # so here install it one by one + for package in list(package_list): + # to make code simple, put all packages needed by one distro in one list. + # the package name may be different for the different sku of the + # same distro. so, install it when the package exists in the repo. + if posix_os.is_package_in_repo(package): + posix_os.install_packages(package) + # fix compile issue on RHEL/CentOS 7.x + if ( + isinstance(self.node.os, Redhat) + and self.node.os.information.version < "8.0.0" + ): + if isinstance(self.node.os, Oracle): + posix_os.install_packages("oracle-softwarecollection-release-el7") + else: + arch = self.node.os.get_kernel_information().hardware_platform + if arch == "x86_64": + xfsprogs_version = posix_os.get_package_information("xfsprogs") + # 4.5.0-20.el7.x86_64 + version_string = ".".join(map(str, xfsprogs_version[:3])) + str( + xfsprogs_version[4] + ) + # try to install the compatible version of xfsprogs-devel with + # xfsprogs package + posix_os.install_packages(f"xfsprogs-devel-{version_string}") + # check if xfsprogs-devel is installed successfully + assert_that(posix_os.package_exists("xfsprogs-devel")).described_as( + "xfsprogs-devel is not installed successfully, please check " + "whether it is available in the repo, and the available " + "versions are compatible with xfsprogs package." 
+ ).is_true() + + posix_os.install_packages(packages="centos-release-scl") + posix_os.install_packages( + packages="devtoolset-7-gcc*", extra_args=["--skip-broken"] + ) + self.node.execute("rm -f /bin/gcc", sudo=True, shell=True) + self.node.execute( + "ln -s /opt/rh/devtoolset-7/root/usr/bin/gcc /bin/gcc", + sudo=True, + shell=True, + ) + # fix compile issue on SLES12SP5 + if ( + isinstance(self.node.os, Suse) + and self.node.os.information.version < "15.0.0" + ): + posix_os.install_packages(packages="gcc5") + self.node.execute("rm -rf /usr/bin/gcc", sudo=True, shell=True) + self.node.execute( + "ln -s /usr/bin/gcc-5 /usr/bin/gcc", + sudo=True, + shell=True, + ) + + def _add_test_users(self) -> None: + # prerequisite for xfstesting + # these users are used in the test code + # refer https://github.com/kdave/xfstests + self.node.execute("useradd -m fsgqa", sudo=True) + self.node.execute("groupadd fsgqa", sudo=True) + self.node.execute("useradd 123456-fsgqa", sudo=True) + self.node.execute("useradd fsgqa2", sudo=True) + + def _install(self, branch: Optional[str] = None, repo: Optional[str] = None) -> bool: + ''' + This method will download and install XFSTest on a given node. + Supported OS are Redhat, Debian, Suse, Ubuntu and CBLMariner3. + Dependencies are installed based on the OS type from _install_dep method. + The test users are added to the node using _add_test_users method. + This method allows you to specify custom repo and branch for xfstest. + Else this defaults to https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master + Example Usage: + xfstest._install(branch="master", repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git") + ''' + branch = branch or self.branch + repo = repo or self.repo + self._install_dep() + self._add_test_users() + tool_path = self.get_tool_path(use_global=True) + git = self.node.tools[Git] + git.clone(url=repo, cwd=tool_path, ref=branch) + make = self.node.tools[Make] + code_path = tool_path.joinpath("xfstests-dev") + + self.node.tools[Rm].remove_file(str(code_path / "src" / "splice2pipe.c")) + self.node.tools[Sed].substitute( + regexp="splice2pipe", + replacement="", + file=str(code_path / "src" / "Makefile"), + ) + + make.make_install(code_path) + return True + + def get_xfstests_path(self) -> PurePath: + return self._code_path + + def set_local_config( + self, + file_system: str, + scratch_dev: str, + scratch_mnt: str, + test_dev: str, + test_folder: str, + test_section: str = "", + mount_opts: str = "", + testfs_mount_opts: str = "", + additional_parameters: Optional[Dict[str, str]] = None, + overwrite_config: bool = False, + ) -> None: + '''This method will create // append a local.config file in the install dir + local.config is used by XFStest to set global as well as testgroup options + Parameters: + scratch_dev (str) : (M)The scratch device to be used for testing + scratch_mnt (str) : (M)The scratch mount point to be used for testing + test_dev (str) : (M)The test device to be used for testing + test_folder (str) : (M)The test folder to be used for testing + file_system (str) : (M)The filesystem type to be tested + test_section (str) : (O)The test group name to be used for testing. + Defaults to the file_system + mount_opts (str) : (O)The mount options to be used for testing. 
+ Empty signifies disk target + testfs_mount_opts (str): (O)The test filesystem mount options to be used for + testing.Defaults to mount_opts + additional_parameters (dict): (O)Additional parameters (dict) to be used for + testing + overwrite_config (bool): (O)If True, the existing local.config file will be + overwritten + + Example Usage: + xfstest.set_local_config( + scratch_dev="/dev/sdb", + scratch_mnt="/mnt/scratch", + test_dev="/dev/sdc", + test_folder="/mnt/test", + file_system="xfs", + test_section="xfs-custom", + mount_opts="noatime", + testfs_mount_opts="noatime", + additional_parameters={"TEST_DEV2": "/dev/sdd"}, + overwrite_config=True + ) + Note: This method will by default enforce dmesg logging. + All tests will have a corresponding dmesg log file in output folder. + ''' + xfstests_path = self.get_xfstests_path() + config_path = xfstests_path.joinpath("local.config") + # If overwrite is specified, remove the existing config file and start afresh + if overwrite_config and self.node.shell.exists(config_path): + self.node.shell.remove(config_path) + # If groupname is not provided, use Filesystem name. + # Warning !!!: if you create multiple sections, specify unique group names for each + if not test_section: + test_section = file_system + echo = self.node.tools[Echo] + # create the core config section + content = "\n".join( + [ + f"[{test_section}]", + f"FSTYP={file_system}", + f"SCRATCH_DEV={scratch_dev}", + f"SCRATCH_MNT={scratch_mnt}", + f"TEST_DEV={test_dev}", + f"TEST_DIR={test_folder}", + ] + ) + + # if Mount options are provided, append to the end of 'content' + if mount_opts: + content += f"\nMOUNT_OPTIONS='{mount_opts}'" + if testfs_mount_opts: + content += f"\nTEST_FS_MOUNT_OPTS='{testfs_mount_opts}'" + # if additional parameters are provided, append to the end of 'content' + if additional_parameters is not None: + for key, value in additional_parameters.items(): + content += f"\n{key}={value}" + # Finally enable DMESG + content += "\nKEEP_DMESG=yes" + # Append to the file if exists, else create a new file if none + echo.write_to_file(content, config_path, append=True) + + def set_excluded_tests(self, exclude_tests: str) -> None: + ''' + This method will create an exclude.txt file with the provided test cases. + The exclude.txt file is used by XFStest to exclude specific test cases from running. + The method takes in the following parameters: + exclude_tests: The test cases to be excluded from testing + Example Usage: + xfstest.set_excluded_tests(exclude_tests="generic/001 generic/002") + ''' + if exclude_tests: + xfstests_path = self.get_xfstests_path() + exclude_file_path = xfstests_path.joinpath("exclude.txt") + if self.node.shell.exists(exclude_file_path): + self.node.shell.remove(exclude_file_path) + echo = self.node.tools[Echo] + for exclude_test in exclude_tests.split(): + echo.write_to_file(exclude_test, exclude_file_path, append=True) + + # add more usable details in subtest additional information field + def create_send_subtest_msg( + self, + test_result: TestResult, + raw_message: str, + test_section: str, + data_disk: str, + ) -> None: + ''' + This method is internal to LISA and is not intended for direct calls. + This method will create and send subtest results to the test result object. 
+ The method takes in the following parameters: + test_result: The test result object to which the subtest results will be sent + raw_message: The raw message from the xfstests output + test_section: The test group name used for testing + data_disk: The data disk used for testing + ''' + all_cases_match = self.__all_cases_pattern.match(raw_message) + assert all_cases_match, "fail to find run cases from xfstests output" + all_cases = (all_cases_match.group("all_cases")).split() + not_run_cases: List[str] = [] + fail_cases: List[str] = [] + not_run_match = self.__not_run_cases_pattern.match(raw_message) + if not_run_match: + not_run_cases = (not_run_match.group("not_run_cases")).split() + fail_match = self.__fail_cases_pattern.match(raw_message) + if fail_match: + fail_cases = (fail_match.group("fail_cases")).split() + pass_cases = [ + x for x in all_cases if x not in not_run_cases and x not in fail_cases + ] + results: List[XfstestsResult] = [] + for case in fail_cases: + results.append( + XfstestsResult( + name=case, + status=TestStatus.FAILED, + message=self.extract_case_content(case, raw_message), + ) + ) + for case in pass_cases: + results.append( + XfstestsResult( + name=case, + status=TestStatus.PASSED, + message=self.extract_case_content(case, raw_message), + ) + ) + for case in not_run_cases: + results.append( + XfstestsResult( + name=case, + status=TestStatus.SKIPPED, + message=self.extract_case_content(case, raw_message), + ) + ) + for result in results: + # create test result message + info: Dict[str, Any] = {} + info["information"] = {} + if test_section: + info["information"]["test_section"] = test_section + if data_disk: + info["information"]["data_disk"] = data_disk + info["information"]["test_details"] = str( + self.create_xfstest_stack_info( + result.name, test_section, str(result.status.name) + ) + ) + send_sub_test_result_message( + test_result=test_result, + test_case_name=result.name, + test_status=result.status, + test_message=result.message, + other_fields=info, + ) + + def check_test_results( + self, + log_path: Path, + test_section: str, + result: TestResult, + data_disk: str = "", + ) -> None: + ''' + This method is intended to be called by run_test method only. + This method will check the xfstests output and send subtest results + to the test result object. + This method depends on create_send_subtest_msg method to send + subtest results. + The method takes in the following parameters: + log_path: The path where the xfstests logs will be saved + test_section: The test group name used for testing + result: The test result object to which the subtest results will be sent + data_disk: The data disk used for testing + ''' + xfstests_path = self.get_xfstests_path() + console_log_results_path = xfstests_path / "xfstest.log" + results_path = xfstests_path / "results/check.log" + fail_cases_list: List[str] = [] + try: + if not self.node.shell.exists(console_log_results_path): + self._log.error( + f"Console log path {console_log_results_path} doesn't exist, please" + " check testing runs well or not." + ) + raise LisaException( + f"Console log path {console_log_results_path} doesn't exist, " + "please check testing runs well or not." 
+ ) + else: + log_result = self.node.tools[Cat].run( + str(console_log_results_path), force_run=True, sudo=True + ) + log_result.assert_exit_code() + ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") + raw_message = ansi_escape.sub("", log_result.stdout) + self.create_send_subtest_msg( + test_result=result, + raw_message=raw_message, + test_section=test_section, + data_disk=data_disk + ) + + if not self.node.shell.exists(results_path): + self._log.error( + f"Result path {results_path} doesn't exist, please check testing" + " runs well or not." + ) + raise LisaException( + f"Result path {results_path} doesn't exist, please check testing" + " runs well or not." + ) + else: + results = self.node.tools[Cat].run( + str(results_path), force_run=True, sudo=True + ) + results.assert_exit_code() + pass_match = self.__all_pass_pattern.match(results.stdout) + if pass_match: + pass_count = pass_match.group("pass_count") + self._log.debug( + f"All pass in xfstests, total pass case count is {pass_count}." + ) + # Xperimental : Passedexception with message/content of XFSTestLog. + # Intent is to display XFSReport on HTML pager. + # TODO: Fix this not displaying output + raise PassedException( + f"No Failed cases found in xfstests.\n" + f"XFSTestLog: {raw_message}" + ) + return + + fail_match = self.__fail_pattern.match(results.stdout) + if fail_match: + assert fail_match + fail_count = fail_match.group("fail_count") + total_count = fail_match.group("total_count") + fail_cases_match = self.__fail_cases_pattern.match(results.stdout) + assert fail_cases_match + fail_info = "" + fail_cases = fail_cases_match.group("fail_cases") + for fail_case in fail_cases.split(): + fail_info += find_patterns_in_lines( + raw_message, [re.compile(f".*{fail_case}.*$", re.MULTILINE)] + )[0][0] + fail_cases_list = fail_cases.split() + raise LisaException( + f"Fail {fail_count} cases of total {total_count},\n fail cases" + f" {fail_cases},\n details: \n{fail_info}, please investigate." + ) + else: + # Mark the fail count as zero, else code will fail since we never + # fetch fail_count from regex.This variable is used in Finally block + fail_count = 0 + self._log.debug("No failed cases found in xfstests.") + finally: + self.save_xfstests_log(fail_cases_list, log_path, test_section) + results_folder = xfstests_path / "results/" + self.node.execute(f"rm -rf {results_folder}", sudo=True) + self.node.execute(f"rm -f {console_log_results_path}", sudo=True) + + def save_xfstests_log( + self, fail_cases_list: List[str], log_path: Path, test_section: str + ) -> None: + ''' + This method is intended to be called by check_test_results method only. 
+ This method will copy the output of XFSTest results to the host calling LISA + ''' + # if "generic" == test_section: + # test_type = "xfs" + xfstests_path = self.get_xfstests_path() + self.node.tools[Chmod].update_folder(str(xfstests_path), "a+rwx", sudo=True) + if self.node.shell.exists(xfstests_path / "results/check.log"): + self.node.shell.copy_back( + xfstests_path / "results/check.log", + log_path / "xfstests/check.log", + ) + if self.node.shell.exists(xfstests_path / "xfstest.log"): + self.node.shell.copy_back( + xfstests_path / "xfstest.log", + log_path / "xfstests/xfstest.log", + ) + + for fail_case in fail_cases_list: + file_name = f"results/{test_section}/{fail_case}.out.bad" + result_path = xfstests_path / file_name + if self.node.shell.exists(result_path): + self.node.shell.copy_back(result_path, log_path / file_name) + else: + self._log.debug(f"{file_name} doesn't exist.") + file_name = f"results/{test_section}/{fail_case}.full" + result_path = xfstests_path / file_name + if self.node.shell.exists(result_path): + self.node.shell.copy_back(result_path, log_path / file_name) + else: + self._log.debug(f"{file_name} doesn't exist.") + file_name = f"results/{test_section}/{fail_case}.dmesg" + result_path = xfstests_path / file_name + if self.node.shell.exists(result_path): + self.node.shell.copy_back(result_path, log_path / file_name) + else: + self._log.debug(f"{file_name} doesn't exist.") + + def extract_case_content(self, case: str, raw_message: str) -> str: + ''' + Support method to extract the content of a specific test case + from the xfstests output. Its intended for LISA use only. + The method takes in the following parameters: + case: The test case name for which the content is needed + raw_message: The raw message from the xfstests output + The method returns the content of the specific test case + + Example Usage: + xfstest.extract_case_content(case="generic/001", raw_message=raw_message) + ''' + # Define the pattern to match the specific case and capture all + # content until the next / line + pattern = re.compile( + rf"({case}.*?)(?=\n[a-zA-Z]+/\d+|\nRan: |\nNot run: |\nFailures: |\nSECTION|\Z)", + re.DOTALL, + ) + # Search for the pattern in the raw_message + result = pattern.search(raw_message) + + # Extract the matched content and remove the {case} from the start + if result: + extracted_content = result.group(1) + cleaned_content = re.sub(rf"^{case}\s*", "", extracted_content) + # Remove any string in [ ] at the start of the cleaned_content + cleaned_content = re.sub(r"^\[.*?\]\s*", "", cleaned_content) + return cleaned_content.strip() + else: + return "" + + def extract_file_content(self, file_path: str) -> str: + ''' + Support method to use the Cat command to extract file content. + This method is called by the create_xfstest_stack_info method. 
+ The method takes in the following parameters: + file_path: The file path for which the content is needed + The method returns the content of the specific file + Example Usage: + xfstest.extract_file_content(file_path="/path/to/file") + ''' + # Use the cat tool to read the file content + if not Path(file_path).exists(): + self._log.debug(f"{file_path} doesn't exist.") + return "" + cat_tool = self.node.tools[Cat] + file_content = cat_tool.run(file_path, force_run=True) + return str(file_content.stdout) + + def create_xfstest_stack_info( + self, + case: str, + test_section: str, + test_status: str, + ) -> str: + ''' + This method is used to look up the xfstests results directory and + extract the dmesg and diff output for the given test case. + The method takes in the following parameters: + case: The test case name for which the stack info is needed + test_section: The test group name used for testing + test_status: The test status for the given test case + The method returns the stack info message for the given test case + + Example Usage: + xfstest.create_xfstest_stack_info( + case="generic/001", + test_section="xfs", + test_status="FAILED + ) + + Note: When running LISA in debug mode, you should expect to see a lot of messages + from 'ls' tool. This is because the method is checking for the existence of files + in the results directory. This is normal behavior and should be ignored. + This happens since we are looking for files for each test case rather than entire test run. + We are working on a fix to reduce the number of 'ls' calls and speed up the process. + ''' + # Get XFSTest current path. we are looking at results/{test_type} directory here + xfstests_path = self.get_xfstests_path() + test_class = case.split("/")[0] + test_id = case.split("/")[1] + result_path = xfstests_path / f"results/{test_section}/{test_class}" + return_message: str = "" + # this needs to be fixed as it's spilling over to console output. + if self.node.tools[Ls].path_exists(str(result_path), sudo=True): + + # Note. This will dump a lot of output on debug console screen. + # Only un-comment for debugging. + # self._log.debug( + # f"Found files in path {result_path} : " + # f"{self.node.tools[Ls].list(str(result_path), sudo=True)}" + # ) + # If passed, we only need DMESG output + if test_status == "PASSED": + dmesg_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True + ) + return_message = f"DMESG: {dmesg_result.stdout}" + # If failed, we need dmesg with diff output + elif test_status == "FAILED": + dmesg_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True + ) + full_out = result_path / f"{test_id}.full" + fail_out = result_path / f"{test_id}.out.bad" + # check of "full_out" and "fail_out" file exists + # Only then call diff tool. + if ((self.node.tools[Ls].path_exists(str(full_out), sudo=True)) and + (self.node.tools[Ls].path_exists(str(fail_out), sudo=True))): + diff_result = self.node.tools[Diff].comparefiles( + src=full_out, + dest=fail_out, + ) + # else if full_out is null, return the fail_out file content. + # In some test cases, full_out is not generated due to permissions + # of other issues. However a fail file will always exists in such cases. 
+ elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): + diff_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.out.bad", force_run=True, sudo=True + ) + return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result.stdout}" + # return_message = f"DMESG: {dmesg_result.stdout}" + # No output is needed. Although we can add Dmesg in the future + elif test_status == "SKIPPED": + notrun_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.notrun", force_run=True, sudo=True + ) + return_message = f"NOTRUN: {notrun_result.stdout}" + else: + self._log.debug(f"No files found in path {result_path}") + return_message = f"No files found in path {result_path}" + self._log.debug( + f"Returning message from create_xfstest_stack_info : {return_message}" + ) + return return_message From 0e991ab078cfc2463909a5d455524efa312ef138 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 20:20:46 +0530 Subject: [PATCH 02/37] Update xfstests.py --- lisa/tools/xfstests.py | 177 ++++++++++++++++++++++++++--------------- 1 file changed, 113 insertions(+), 64 deletions(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index fc44689f62..3de70ab3b4 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -3,7 +3,7 @@ import re from dataclasses import dataclass from pathlib import Path, PurePath -from typing import Any, Dict, List, Optional, Type, cast +from typing import Any, cast, Dict, List, Optional, Type, TYPE_CHECKING from assertpy import assert_that @@ -18,14 +18,16 @@ Suse, Ubuntu, ) -from lisa.testsuite import TestResult + +if TYPE_CHECKING: + from lisa.testsuite import TestResult + from lisa.tools import Cat, Chmod, Diff, Echo, Git, Ls, Make, Pgrep, Rm, Sed from lisa.util import ( - PassedException, LisaException, + PassedException, UnsupportedDistroException, find_patterns_in_lines, - SkippedException, ) @@ -37,8 +39,18 @@ class XfstestsResult: class Xfstests(Tool): + """ + Xfstests - Filesystem testing tool. + installed (default) from https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git + Mirrored daily from kernel.org repository. + For details, refer to https://github.com/kdave/xfstests/blob/master/README + """ + + # This is the default repo and branch for xfstests. + # Override this via _install method if needed. repo = "https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" branch = "master" + # these are dependencies for xfstests. Update on regular basis. common_dep = [ "acl", "attr", @@ -131,25 +143,31 @@ class Xfstests(Tool): "psmisc", "perl-CPAN", ] + # Regular expression for parsing xfstests output + # Example: # Passed all 35 tests __all_pass_pattern = re.compile( r"([\w\W]*?)Passed all (?P\d+) tests", re.MULTILINE ) + # Example: # Failed 22 of 514 tests __fail_pattern = re.compile( r"([\w\W]*?)Failed (?P\d+) of (?P\d+) tests", re.MULTILINE, ) + # Example: # Failures: generic/079 generic/193 generic/230 generic/256 generic/314 generic/317 generic/318 generic/355 generic/382 generic/523 generic/536 generic/553 generic/554 generic/565 generic/566 generic/587 generic/594 generic/597 generic/598 generic/600 generic/603 generic/646 # noqa: E501 __fail_cases_pattern = re.compile( r"([\w\W]*?)Failures: (?P.*)", re.MULTILINE, ) + # Example: # Ran: generic/001 generic/002 generic/003 ... __all_cases_pattern = re.compile( r"([\w\W]*?)Ran: (?P.*)", re.MULTILINE, ) + # Example: # Not run: generic/110 generic/111 generic/115 ... 
__not_run_cases_pattern = re.compile( r"([\w\W]*?)Not run: (?P.*)", @@ -174,13 +192,13 @@ def run_test( self, # test_type: str, log_path: Path, - result: TestResult, - test_section: str = "", + result: "TestResult", + test_section: str, data_disk: str = "", test_cases: str = "", timeout: int = 14400, ) -> None: - '''About: This method runs XFSTest on a given node with the specified + """About: This method runs XFSTest on a given node with the specified test group and test cases.If test_section is not specified , test is run with "generic/quick" classification and XFS environment variables. If test_section is specified, test is run with the specified test group @@ -197,13 +215,13 @@ def run_test( Defaults to "generic/quick" note: if specified, test_section must exist in local.config data_disk: The data disk used for testing - test_cases: The test cases to be run. If empty, all test cases barring - exclude.txt entries are run + test_cases: The test cases to be run. If empty, all installed test cases + barring exclude.txt entries are run timeout: The time in seconds after which the test will be timed out. - Defaults to 4 hours + Defaults to 4 hours. - usage example: + Example: xfstest.run_test( log_path=Path("/tmp/xfstests"), @@ -213,7 +231,7 @@ def run_test( test_cases="generic/001 generic/002", timeout=14400, ) - ''' + """ # if Test group is specified, and exists in local.config, run tests. if test_section: self.run_async( @@ -223,7 +241,7 @@ def run_test( force_run=True, cwd=self.get_xfstests_path(), ) - # Else run generic quick test + # Else run generic quick test. This is not recommended. else: self.run_async( f"-g generic/quick -E exclude.txt {test_cases} > xfstest.log 2>&1", @@ -251,9 +269,12 @@ def _initialize(self, *args: Any, **kwargs: Any) -> None: self._code_path = self.get_tool_path(use_global=True) / "xfstests-dev" def _install_dep(self) -> None: - ''' - This method will install dependencies based on OS. - Dependencies are fetched from ''' + """ + About: This method will install dependencies based on OS. + Dependencies are fetched from the common arrays such as + common_dep, debian_dep, fedora_dep, suse_dep, mariner_dep. + If the OS is not supported, a LisaException is raised. + """ posix_os: Posix = cast(Posix, self.node.os) # install dependency packages package_list = [] @@ -343,16 +364,18 @@ def _add_test_users(self) -> None: self.node.execute("useradd fsgqa2", sudo=True) def _install(self, branch: Optional[str] = None, repo: Optional[str] = None) -> bool: - ''' - This method will download and install XFSTest on a given node. + """ + About:This method will download and install XFSTest on a given node. Supported OS are Redhat, Debian, Suse, Ubuntu and CBLMariner3. Dependencies are installed based on the OS type from _install_dep method. The test users are added to the node using _add_test_users method. + This method allows you to specify custom repo and branch for xfstest. 
Else this defaults to https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master - Example Usage: + + Example: xfstest._install(branch="master", repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git") - ''' + """ branch = branch or self.branch repo = repo or self.repo self._install_dep() @@ -389,8 +412,17 @@ def set_local_config( additional_parameters: Optional[Dict[str, str]] = None, overwrite_config: bool = False, ) -> None: - '''This method will create // append a local.config file in the install dir + """ + About: This method will create // append a local.config file in the install dir local.config is used by XFStest to set global as well as testgroup options + + Note:You can call this method multiple times to create multiple sections. + The code does not checks for duplicate section names, so that is the users responsibility. + Also take note of how options are carried between sectoins, that include the sections which + not going to be run. + Recommend going through : https://github.com/kdave/xfstests/blob/master/README.config-sections + for more details on how to use local.config + Parameters: scratch_dev (str) : (M)The scratch device to be used for testing scratch_mnt (str) : (M)The scratch mount point to be used for testing @@ -408,7 +440,7 @@ def set_local_config( overwrite_config (bool): (O)If True, the existing local.config file will be overwritten - Example Usage: + Example: xfstest.set_local_config( scratch_dev="/dev/sdb", scratch_mnt="/mnt/scratch", @@ -423,7 +455,7 @@ def set_local_config( ) Note: This method will by default enforce dmesg logging. All tests will have a corresponding dmesg log file in output folder. - ''' + """ xfstests_path = self.get_xfstests_path() config_path = xfstests_path.joinpath("local.config") # If overwrite is specified, remove the existing config file and start afresh @@ -461,14 +493,18 @@ def set_local_config( echo.write_to_file(content, config_path, append=True) def set_excluded_tests(self, exclude_tests: str) -> None: - ''' - This method will create an exclude.txt file with the provided test cases. + """ + About:This method will create an exclude.txt file with the provided test cases. The exclude.txt file is used by XFStest to exclude specific test cases from running. The method takes in the following parameters: exclude_tests: The test cases to be excluded from testing + + Parameters: + exclude_tests (str): The test cases to be excluded from testing + Example Usage: xfstest.set_excluded_tests(exclude_tests="generic/001 generic/002") - ''' + """ if exclude_tests: xfstests_path = self.get_xfstests_path() exclude_file_path = xfstests_path.joinpath("exclude.txt") @@ -481,20 +517,23 @@ def set_excluded_tests(self, exclude_tests: str) -> None: # add more usable details in subtest additional information field def create_send_subtest_msg( self, - test_result: TestResult, + test_result: "TestResult", raw_message: str, test_section: str, data_disk: str, ) -> None: - ''' - This method is internal to LISA and is not intended for direct calls. + """ + About:This method is internal to LISA and is not intended for direct calls. This method will create and send subtest results to the test result object. - The method takes in the following parameters: + + Parmaeters: test_result: The test result object to which the subtest results will be sent raw_message: The raw message from the xfstests output test_section: The test group name used for testing - data_disk: The data disk used for testing - ''' + data_disk: The data disk used for testing. 
( method is partially implemented ) + + + """ all_cases_match = self.__all_cases_pattern.match(raw_message) assert all_cases_match, "fail to find run cases from xfstests output" all_cases = (all_cases_match.group("all_cases")).split() @@ -559,21 +598,23 @@ def check_test_results( self, log_path: Path, test_section: str, - result: TestResult, + result: "TestResult", data_disk: str = "", ) -> None: - ''' - This method is intended to be called by run_test method only. + """ + About: This method is intended to be called by run_test method only. This method will check the xfstests output and send subtest results to the test result object. This method depends on create_send_subtest_msg method to send subtest results. - The method takes in the following parameters: + + Parameters: log_path: The path where the xfstests logs will be saved test_section: The test group name used for testing result: The test result object to which the subtest results will be sent - data_disk: The data disk used for testing - ''' + data_disk: The data disk used for testing ( Method partially implemented ) + + """ xfstests_path = self.get_xfstests_path() console_log_results_path = xfstests_path / "xfstest.log" results_path = xfstests_path / "results/check.log" @@ -663,10 +704,12 @@ def check_test_results( def save_xfstests_log( self, fail_cases_list: List[str], log_path: Path, test_section: str ) -> None: - ''' - This method is intended to be called by check_test_results method only. - This method will copy the output of XFSTest results to the host calling LISA - ''' + """ + About:This method is intended to be called by check_test_results method only. + This method will copy the output of XFSTest results to the Log folder of host + calling LISA. Files copied are xfsresult.log, check.log and all failed cases files + if they exist. + """ # if "generic" == test_section: # test_type = "xfs" xfstests_path = self.get_xfstests_path() @@ -703,17 +746,17 @@ def save_xfstests_log( self._log.debug(f"{file_name} doesn't exist.") def extract_case_content(self, case: str, raw_message: str) -> str: - ''' - Support method to extract the content of a specific test case + """ + About:Support method to extract the content of a specific test case from the xfstests output. Its intended for LISA use only. The method takes in the following parameters: case: The test case name for which the content is needed raw_message: The raw message from the xfstests output The method returns the content of the specific test case - Example Usage: + Example: xfstest.extract_case_content(case="generic/001", raw_message=raw_message) - ''' + """ # Define the pattern to match the specific case and capture all # content until the next / line pattern = re.compile( @@ -734,15 +777,19 @@ def extract_case_content(self, case: str, raw_message: str) -> str: return "" def extract_file_content(self, file_path: str) -> str: - ''' - Support method to use the Cat command to extract file content. + """ + About: Support method to use the Cat command to extract file content. This method is called by the create_xfstest_stack_info method. - The method takes in the following parameters: + Its purpose is to read the ASCII content of the file for further + tasks such as diff in case of failed cases. 
+ + Parameters: file_path: The file path for which the content is needed The method returns the content of the specific file - Example Usage: + + Example: xfstest.extract_file_content(file_path="/path/to/file") - ''' + """ # Use the cat tool to read the file content if not Path(file_path).exists(): self._log.debug(f"{file_path} doesn't exist.") @@ -757,28 +804,31 @@ def create_xfstest_stack_info( test_section: str, test_status: str, ) -> str: - ''' - This method is used to look up the xfstests results directory and - extract the dmesg and diff output for the given test case. - The method takes in the following parameters: + """ + About:This method is used to look up the xfstests results directory and + extract the dmesg and full//fail diff output for the given test case. + + Parameters: case: The test case name for which the stack info is needed test_section: The test group name used for testing test_status: The test status for the given test case + + Returns: The method returns the stack info message for the given test case - Example Usage: + Example: xfstest.create_xfstest_stack_info( case="generic/001", test_section="xfs", test_status="FAILED ) - Note: When running LISA in debug mode, you should expect to see a lot of messages - from 'ls' tool. This is because the method is checking for the existence of files - in the results directory. This is normal behavior and should be ignored. - This happens since we are looking for files for each test case rather than entire test run. - We are working on a fix to reduce the number of 'ls' calls and speed up the process. - ''' + Note: When running LISA in debug mode, you should expect to see a lot of verbose + messages from 'ls' tool. This is because the method is checking for the existence + of files "per case basis" in the results directory. This is normal behavior and + should be ignored.We are working on a fix to reduce the verbosity of 'ls' calls + and speed up the process. + """ # Get XFSTest current path. we are looking at results/{test_type} directory here xfstests_path = self.get_xfstests_path() test_class = case.split("/")[0] @@ -787,7 +837,6 @@ def create_xfstest_stack_info( return_message: str = "" # this needs to be fixed as it's spilling over to console output. if self.node.tools[Ls].path_exists(str(result_path), sudo=True): - # Note. This will dump a lot of output on debug console screen. # Only un-comment for debugging. # self._log.debug( @@ -817,7 +866,7 @@ def create_xfstest_stack_info( ) # else if full_out is null, return the fail_out file content. # In some test cases, full_out is not generated due to permissions - # of other issues. However a fail file will always exists in such cases. + # or other issues. However a fail file will always exists in such cases. 
elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): diff_result = self.node.tools[Cat].run( f"{result_path}/{test_id}.out.bad", force_run=True, sudo=True From 85f9163356cacef9ac45cc44fec30186a31535b3 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 20:49:11 +0530 Subject: [PATCH 03/37] Update xfstests.py --- lisa/tools/xfstests.py | 33 ++++++--------------------------- 1 file changed, 6 insertions(+), 27 deletions(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index 3de70ab3b4..148d2e4cad 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -207,7 +207,6 @@ def run_test( exclude.txt entries are run.Runtime is set to 4 hours by default, but can be overridden by the user.This method after running xfstest will parse the output and sends subtest results to the test result object. - Parameters: log_path: The path where the xfstests logs will be saved result: The LISA test result object to which the subtest results will be sent @@ -219,10 +218,7 @@ def run_test( barring exclude.txt entries are run timeout: The time in seconds after which the test will be timed out. Defaults to 4 hours. - - Example: - xfstest.run_test( log_path=Path("/tmp/xfstests"), result=test_result, @@ -369,10 +365,8 @@ def _install(self, branch: Optional[str] = None, repo: Optional[str] = None) -> Supported OS are Redhat, Debian, Suse, Ubuntu and CBLMariner3. Dependencies are installed based on the OS type from _install_dep method. The test users are added to the node using _add_test_users method. - This method allows you to specify custom repo and branch for xfstest. Else this defaults to https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master - Example: xfstest._install(branch="master", repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git") """ @@ -415,14 +409,13 @@ def set_local_config( """ About: This method will create // append a local.config file in the install dir local.config is used by XFStest to set global as well as testgroup options - Note:You can call this method multiple times to create multiple sections. - The code does not checks for duplicate section names, so that is the users responsibility. - Also take note of how options are carried between sectoins, that include the sections which - not going to be run. + The code does not checks for duplicate section names, so that is the users + responsibility. + Also take note of how options are carried between sectoins, that include the + sections which are not going to be run. Recommend going through : https://github.com/kdave/xfstests/blob/master/README.config-sections for more details on how to use local.config - Parameters: scratch_dev (str) : (M)The scratch device to be used for testing scratch_mnt (str) : (M)The scratch mount point to be used for testing @@ -439,7 +432,6 @@ def set_local_config( testing overwrite_config (bool): (O)If True, the existing local.config file will be overwritten - Example: xfstest.set_local_config( scratch_dev="/dev/sdb", @@ -495,13 +487,12 @@ def set_local_config( def set_excluded_tests(self, exclude_tests: str) -> None: """ About:This method will create an exclude.txt file with the provided test cases. - The exclude.txt file is used by XFStest to exclude specific test cases from running. + The exclude.txt file is used by XFStest to exclude specific test cases from + running. 
The method takes in the following parameters: exclude_tests: The test cases to be excluded from testing - Parameters: exclude_tests (str): The test cases to be excluded from testing - Example Usage: xfstest.set_excluded_tests(exclude_tests="generic/001 generic/002") """ @@ -525,14 +516,11 @@ def create_send_subtest_msg( """ About:This method is internal to LISA and is not intended for direct calls. This method will create and send subtest results to the test result object. - Parmaeters: test_result: The test result object to which the subtest results will be sent raw_message: The raw message from the xfstests output test_section: The test group name used for testing data_disk: The data disk used for testing. ( method is partially implemented ) - - """ all_cases_match = self.__all_cases_pattern.match(raw_message) assert all_cases_match, "fail to find run cases from xfstests output" @@ -607,13 +595,11 @@ def check_test_results( to the test result object. This method depends on create_send_subtest_msg method to send subtest results. - Parameters: log_path: The path where the xfstests logs will be saved test_section: The test group name used for testing result: The test result object to which the subtest results will be sent data_disk: The data disk used for testing ( Method partially implemented ) - """ xfstests_path = self.get_xfstests_path() console_log_results_path = xfstests_path / "xfstest.log" @@ -753,7 +739,6 @@ def extract_case_content(self, case: str, raw_message: str) -> str: case: The test case name for which the content is needed raw_message: The raw message from the xfstests output The method returns the content of the specific test case - Example: xfstest.extract_case_content(case="generic/001", raw_message=raw_message) """ @@ -782,11 +767,9 @@ def extract_file_content(self, file_path: str) -> str: This method is called by the create_xfstest_stack_info method. Its purpose is to read the ASCII content of the file for further tasks such as diff in case of failed cases. - Parameters: file_path: The file path for which the content is needed The method returns the content of the specific file - Example: xfstest.extract_file_content(file_path="/path/to/file") """ @@ -807,22 +790,18 @@ def create_xfstest_stack_info( """ About:This method is used to look up the xfstests results directory and extract the dmesg and full//fail diff output for the given test case. - Parameters: case: The test case name for which the stack info is needed test_section: The test group name used for testing test_status: The test status for the given test case - Returns: The method returns the stack info message for the given test case - Example: xfstest.create_xfstest_stack_info( case="generic/001", test_section="xfs", test_status="FAILED ) - Note: When running LISA in debug mode, you should expect to see a lot of verbose messages from 'ls' tool. This is because the method is checking for the existence of files "per case basis" in the results directory. 
This is normal behavior and From ad52bbfc3f9db1a28b8846e24ae2a53b3f0818f6 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 21:14:14 +0530 Subject: [PATCH 04/37] Update xfstests.py --- lisa/tools/xfstests.py | 23 +++++++++++++++-------- 1 file changed, 15 insertions(+), 8 deletions(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index 148d2e4cad..0861fde4ba 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -3,7 +3,7 @@ import re from dataclasses import dataclass from pathlib import Path, PurePath -from typing import Any, cast, Dict, List, Optional, Type, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, cast from assertpy import assert_that @@ -214,7 +214,7 @@ def run_test( Defaults to "generic/quick" note: if specified, test_section must exist in local.config data_disk: The data disk used for testing - test_cases: The test cases to be run. If empty, all installed test cases + test_cases: The test cases to be run. If empty, all installed test cases barring exclude.txt entries are run timeout: The time in seconds after which the test will be timed out. Defaults to 4 hours. @@ -267,7 +267,7 @@ def _initialize(self, *args: Any, **kwargs: Any) -> None: def _install_dep(self) -> None: """ About: This method will install dependencies based on OS. - Dependencies are fetched from the common arrays such as + Dependencies are fetched from the common arrays such as common_dep, debian_dep, fedora_dep, suse_dep, mariner_dep. If the OS is not supported, a LisaException is raised. """ @@ -359,7 +359,11 @@ def _add_test_users(self) -> None: self.node.execute("useradd 123456-fsgqa", sudo=True) self.node.execute("useradd fsgqa2", sudo=True) - def _install(self, branch: Optional[str] = None, repo: Optional[str] = None) -> bool: + def _install( + self, + branch: Optional[str] = None, + repo: Optional[str] = None, + ) -> bool: """ About:This method will download and install XFSTest on a given node. Supported OS are Redhat, Debian, Suse, Ubuntu and CBLMariner3. @@ -445,7 +449,7 @@ def set_local_config( additional_parameters={"TEST_DEV2": "/dev/sdd"}, overwrite_config=True ) - Note: This method will by default enforce dmesg logging. + Note: This method will by default enforce dmesg logging. All tests will have a corresponding dmesg log file in output folder. """ xfstests_path = self.get_xfstests_path() @@ -454,7 +458,8 @@ def set_local_config( if overwrite_config and self.node.shell.exists(config_path): self.node.shell.remove(config_path) # If groupname is not provided, use Filesystem name. - # Warning !!!: if you create multiple sections, specify unique group names for each + # Warning !!!: if you create multiple sections, + # you must specify unique group names for each if not test_section: test_section = file_system echo = self.node.tools[Echo] @@ -837,8 +842,10 @@ def create_xfstest_stack_info( fail_out = result_path / f"{test_id}.out.bad" # check of "full_out" and "fail_out" file exists # Only then call diff tool. 
- if ((self.node.tools[Ls].path_exists(str(full_out), sudo=True)) and - (self.node.tools[Ls].path_exists(str(fail_out), sudo=True))): + if ( + (self.node.tools[Ls].path_exists(str(full_out), sudo=True)) + and (self.node.tools[Ls].path_exists(str(fail_out), sudo=True)) + ): diff_result = self.node.tools[Diff].comparefiles( src=full_out, dest=fail_out, From 34666dd25f0dd13179414b4bba4744664ef5bdf8 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 21:24:39 +0530 Subject: [PATCH 05/37] Update xfstests.py --- lisa/tools/xfstests.py | 26 ++++++++++++++++---------- 1 file changed, 16 insertions(+), 10 deletions(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index 0861fde4ba..e911a912d0 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -370,9 +370,13 @@ def _install( Dependencies are installed based on the OS type from _install_dep method. The test users are added to the node using _add_test_users method. This method allows you to specify custom repo and branch for xfstest. - Else this defaults to https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master + Else this defaults to: + https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master Example: - xfstest._install(branch="master", repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git") + xfstest._install( + branch="master", + repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" + ) """ branch = branch or self.branch repo = repo or self.repo @@ -418,7 +422,8 @@ def set_local_config( responsibility. Also take note of how options are carried between sectoins, that include the sections which are not going to be run. - Recommend going through : https://github.com/kdave/xfstests/blob/master/README.config-sections + Recommend going through link: + https://github.com/kdave/xfstests/blob/master/README.config-sections for more details on how to use local.config Parameters: scratch_dev (str) : (M)The scratch device to be used for testing @@ -492,7 +497,7 @@ def set_local_config( def set_excluded_tests(self, exclude_tests: str) -> None: """ About:This method will create an exclude.txt file with the provided test cases. - The exclude.txt file is used by XFStest to exclude specific test cases from + The exclude.txt file is used by XFStest to exclude specific test cases from running. The method takes in the following parameters: exclude_tests: The test cases to be excluded from testing @@ -631,7 +636,7 @@ def check_test_results( test_result=result, raw_message=raw_message, test_section=test_section, - data_disk=data_disk + data_disk=data_disk, ) if not self.node.shell.exists(results_path): @@ -698,8 +703,8 @@ def save_xfstests_log( """ About:This method is intended to be called by check_test_results method only. This method will copy the output of XFSTest results to the Log folder of host - calling LISA. Files copied are xfsresult.log, check.log and all failed cases files - if they exist. + calling LISA. Files copied are xfsresult.log, check.log and all failed cases + files if they exist. 
""" # if "generic" == test_section: # test_type = "xfs" @@ -750,7 +755,8 @@ def extract_case_content(self, case: str, raw_message: str) -> str: # Define the pattern to match the specific case and capture all # content until the next / line pattern = re.compile( - rf"({case}.*?)(?=\n[a-zA-Z]+/\d+|\nRan: |\nNot run: |\nFailures: |\nSECTION|\Z)", + rf"({case}.*?)(?=" + r"\n[a-zA-Z]+/\d+|\nRan: |\nNot run: |\nFailures: |\nSECTION|\Z)", re.DOTALL, ) # Search for the pattern in the raw_message @@ -770,7 +776,7 @@ def extract_file_content(self, file_path: str) -> str: """ About: Support method to use the Cat command to extract file content. This method is called by the create_xfstest_stack_info method. - Its purpose is to read the ASCII content of the file for further + Its purpose is to read the ASCII content of the file for further tasks such as diff in case of failed cases. Parameters: file_path: The file path for which the content is needed @@ -821,7 +827,7 @@ def create_xfstest_stack_info( return_message: str = "" # this needs to be fixed as it's spilling over to console output. if self.node.tools[Ls].path_exists(str(result_path), sudo=True): - # Note. This will dump a lot of output on debug console screen. + # Note. This will dump a lot of output on debug console screen. # Only un-comment for debugging. # self._log.debug( # f"Found files in path {result_path} : " From 34248f4fb6e8f490acb5bb891f1ee2d8a4a0a375 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 21:37:21 +0530 Subject: [PATCH 06/37] Update xfstests.py --- lisa/tools/xfstests.py | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index e911a912d0..aa2c01acf6 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -799,8 +799,9 @@ def create_xfstest_stack_info( test_status: str, ) -> str: """ - About:This method is used to look up the xfstests results directory and - extract the dmesg and full//fail diff output for the given test case. + About:This method is used to look up the xfstests results directory and extract + dmesg and full/fail diff output for the given test case. + Parameters: case: The test case name for which the stack info is needed test_section: The test group name used for testing @@ -811,14 +812,14 @@ def create_xfstest_stack_info( xfstest.create_xfstest_stack_info( case="generic/001", test_section="xfs", - test_status="FAILED + test_status="FAILED" ) - Note: When running LISA in debug mode, you should expect to see a lot of verbose - messages from 'ls' tool. This is because the method is checking for the existence - of files "per case basis" in the results directory. This is normal behavior and - should be ignored.We are working on a fix to reduce the verbosity of 'ls' calls - and speed up the process. + Note: When running LISA in debug mode, expect verbose messages from 'ls' tool. + This is because the method checks for file existence per case in the results dir. + This is normal behavior and can be ignored. We are working on reducing verbosity + of 'ls' calls to improve performance. """ + # Get XFSTest current path. we are looking at results/{test_type} directory here xfstests_path = self.get_xfstests_path() test_class = case.split("/")[0] @@ -848,10 +849,9 @@ def create_xfstest_stack_info( fail_out = result_path / f"{test_id}.out.bad" # check of "full_out" and "fail_out" file exists # Only then call diff tool. 
- if ( - (self.node.tools[Ls].path_exists(str(full_out), sudo=True)) - and (self.node.tools[Ls].path_exists(str(fail_out), sudo=True)) - ): + if self.node.tools[Ls].path_exists( + str(full_out), sudo=True + ) and self.node.tools[Ls].path_exists(str(fail_out), sudo=True): diff_result = self.node.tools[Diff].comparefiles( src=full_out, dest=fail_out, From ae9274503e4aeadee8cb5fd8b77e448d6b07550e Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 21:48:40 +0530 Subject: [PATCH 07/37] Update xfstests.py --- lisa/tools/xfstests.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index aa2c01acf6..d17af1bc90 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -815,7 +815,8 @@ def create_xfstest_stack_info( test_status="FAILED" ) Note: When running LISA in debug mode, expect verbose messages from 'ls' tool. - This is because the method checks for file existence per case in the results dir. + This is because the method checks for file existence per case in the results + dir. This is normal behavior and can be ignored. We are working on reducing verbosity of 'ls' calls to improve performance. """ From 355c35a0a6fd7f7be5d65be6cbe5ab77ddc3b040 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 22:21:36 +0530 Subject: [PATCH 08/37] Update xfstests.py --- lisa/tools/xfstests.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index d17af1bc90..ec79de642a 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -861,10 +861,13 @@ def create_xfstest_stack_info( # In some test cases, full_out is not generated due to permissions # or other issues. However a fail file will always exists in such cases. elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): - diff_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.out.bad", force_run=True, sudo=True + fail_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.out.bad", + force_run=True, + sudo=True, ) - return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result.stdout}" + diff_result = fail_result.stdout + return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result}" # return_message = f"DMESG: {dmesg_result.stdout}" # No output is needed. 
Although we can add Dmesg in the future elif test_status == "SKIPPED": From 05db5fd8adf75d2527f271c4ea45fe3c82bb1ff8 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 5 Mar 2025 22:26:57 +0530 Subject: [PATCH 09/37] Update xfstests.py --- lisa/tools/xfstests.py | 1 - 1 file changed, 1 deletion(-) diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py index ec79de642a..b04cae1989 100644 --- a/lisa/tools/xfstests.py +++ b/lisa/tools/xfstests.py @@ -666,7 +666,6 @@ def check_test_results( f"No Failed cases found in xfstests.\n" f"XFSTestLog: {raw_message}" ) - return fail_match = self.__fail_pattern.match(results.stdout) if fail_match: From 6816384c5c7cf73bb06fab93af68b7a13b11b3b0 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 7 Mar 2025 22:04:50 +0530 Subject: [PATCH 10/37] update XFSTesting and Xfstests tool --- lisa/tools/__init__.py | 2 - lisa/tools/xfstests.py | 883 -------------------- microsoft/testsuites/xfstests/xfstesting.py | 284 +++++-- microsoft/testsuites/xfstests/xfstests.py | 534 ++++++++++-- 4 files changed, 665 insertions(+), 1038 deletions(-) delete mode 100644 lisa/tools/xfstests.py diff --git a/lisa/tools/__init__.py b/lisa/tools/__init__.py index cc3be0ad60..da08f164f3 100644 --- a/lisa/tools/__init__.py +++ b/lisa/tools/__init__.py @@ -129,7 +129,6 @@ from .whoami import Whoami from .windows_feature import WindowsFeatureManagement from .wsl import Wsl -from .xfstests import Xfstests __all__ = [ "AptAddRepository", @@ -270,5 +269,4 @@ "Whoami", "WindowsFeatureManagement", "Wsl", - "Xfstests", ] diff --git a/lisa/tools/xfstests.py b/lisa/tools/xfstests.py deleted file mode 100644 index b04cae1989..0000000000 --- a/lisa/tools/xfstests.py +++ /dev/null @@ -1,883 +0,0 @@ -# Copyright (c) Microsoft Corporation. -# Licensed under the MIT license. -import re -from dataclasses import dataclass -from pathlib import Path, PurePath -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, cast - -from assertpy import assert_that - -from lisa.executable import Tool -from lisa.messages import TestStatus, send_sub_test_result_message -from lisa.operating_system import ( - CBLMariner, - Debian, - Oracle, - Posix, - Redhat, - Suse, - Ubuntu, -) - -if TYPE_CHECKING: - from lisa.testsuite import TestResult - -from lisa.tools import Cat, Chmod, Diff, Echo, Git, Ls, Make, Pgrep, Rm, Sed -from lisa.util import ( - LisaException, - PassedException, - UnsupportedDistroException, - find_patterns_in_lines, -) - - -@dataclass -class XfstestsResult: - name: str = "" - status: TestStatus = TestStatus.QUEUED - message: str = "" - - -class Xfstests(Tool): - """ - Xfstests - Filesystem testing tool. - installed (default) from https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git - Mirrored daily from kernel.org repository. - For details, refer to https://github.com/kdave/xfstests/blob/master/README - """ - - # This is the default repo and branch for xfstests. - # Override this via _install method if needed. - repo = "https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" - branch = "master" - # these are dependencies for xfstests. Update on regular basis. 
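# Distilled sketch of the per-distro dependency handling: packages are chosen from
# the common list plus a distro-specific list and installed one at a time, so a
# package missing from a given repo does not abort the rest. The two callables
# stand in for LISA's is_package_in_repo / install_packages:
from typing import Callable, Dict, List


def install_xfstests_deps(
    distro_family: str,
    package_lists: Dict[str, List[str]],
    in_repo: Callable[[str], bool],
    install: Callable[[str], None],
) -> List[str]:
    packages = list(package_lists["common"]) + package_lists.get(distro_family, [])
    installed = []
    for package in packages:
        # skip anything this distro's repositories do not carry
        if in_repo(package):
            install(package)
            installed.append(package)
    return installed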
- common_dep = [ - "acl", - "attr", - "automake", - "bc", - "cifs-utils", - "dos2unix", - "dump", - "e2fsprogs", - "e2fsprogs-devel", - "gawk", - "gcc", - "libtool", - "lvm2", - "make", - "parted", - "quota", - "quota-devel", - "sed", - "xfsdump", - "xfsprogs", - "indent", - "python", - "fio", - "dbench", - ] - debian_dep = [ - "libacl1-dev", - "libaio-dev", - "libattr1-dev", - "libgdbm-dev", - "libtool-bin", - "libuuid1", - "libuuidm-ocaml-dev", - "sqlite3", - "uuid-dev", - "uuid-runtime", - "xfslibs-dev", - "zlib1g-dev", - "btrfs-tools", - "btrfs-progs", - "libgdbm-compat-dev", - "liburing-dev", - "liburing2", - ] - fedora_dep = [ - "libtool", - "libuuid-devel", - "libacl-devel", - "xfsprogs-devel", - "epel-release", - "libaio-devel", - "libattr-devel", - "sqlite", - "xfsprogs-qa-devel", - "zlib-devel", - "btrfs-progs-devel", - "llvm-ocaml-devel", - "uuid-devel", - "libtool", - "e2fsprogs-devel", - "gdbm-devel", - ] - suse_dep = [ - "btrfsprogs", - "libacl-devel", - "libaio-devel", - "libattr-devel", - "sqlite", - "xfsprogs-devel", - "lib-devel", - ] - mariner_dep = [ - "python-iniparse", - "libacl-devel", - "libaio-devel", - "libattr-devel", - "sqlite", - "xfsprogs-devel", - "zlib-devel", - "trfs-progs-devel", - "diffutils", - "btrfs-progs", - "btrfs-progs-devel", - "gcc", - "autoconf", - "binutils", - "kernel-headers", - "util-linux-devel", - "psmisc", - "perl-CPAN", - ] - # Regular expression for parsing xfstests output - # Example: - # Passed all 35 tests - __all_pass_pattern = re.compile( - r"([\w\W]*?)Passed all (?P\d+) tests", re.MULTILINE - ) - # Example: - # Failed 22 of 514 tests - __fail_pattern = re.compile( - r"([\w\W]*?)Failed (?P\d+) of (?P\d+) tests", - re.MULTILINE, - ) - # Example: - # Failures: generic/079 generic/193 generic/230 generic/256 generic/314 generic/317 generic/318 generic/355 generic/382 generic/523 generic/536 generic/553 generic/554 generic/565 generic/566 generic/587 generic/594 generic/597 generic/598 generic/600 generic/603 generic/646 # noqa: E501 - __fail_cases_pattern = re.compile( - r"([\w\W]*?)Failures: (?P.*)", - re.MULTILINE, - ) - # Example: - # Ran: generic/001 generic/002 generic/003 ... - __all_cases_pattern = re.compile( - r"([\w\W]*?)Ran: (?P.*)", - re.MULTILINE, - ) - # Example: - # Not run: generic/110 generic/111 generic/115 ... - __not_run_cases_pattern = re.compile( - r"([\w\W]*?)Not run: (?P.*)", - re.MULTILINE, - ) - - @property - def command(self) -> str: - # The command is not used - # _check_exists is overwritten to check tool existence - return str(self.get_tool_path(use_global=True) / "xfstests-dev" / "check") - - @property - def can_install(self) -> bool: - return True - - @property - def dependencies(self) -> List[Type[Tool]]: - return [Git, Make] - - def run_test( - self, - # test_type: str, - log_path: Path, - result: "TestResult", - test_section: str, - data_disk: str = "", - test_cases: str = "", - timeout: int = 14400, - ) -> None: - """About: This method runs XFSTest on a given node with the specified - test group and test cases.If test_section is not specified , test is - run with "generic/quick" classification and XFS environment variables. 
- If test_section is specified, test is run with the specified test group - and XFS environment variables from local.config.If test_cases is specified, - only the specified test cases are run.If empty, all test cases barring - exclude.txt entries are run.Runtime is set to 4 hours by default, - but can be overridden by the user.This method after running xfstest - will parse the output and sends subtest results to the test result object. - Parameters: - log_path: The path where the xfstests logs will be saved - result: The LISA test result object to which the subtest results will be sent - test_section: The test group name to be used for testing. - Defaults to "generic/quick" - note: if specified, test_section must exist in local.config - data_disk: The data disk used for testing - test_cases: The test cases to be run. If empty, all installed test cases - barring exclude.txt entries are run - timeout: The time in seconds after which the test will be timed out. - Defaults to 4 hours. - Example: - xfstest.run_test( - log_path=Path("/tmp/xfstests"), - result=test_result, - test_section="generic/quick", - data_disk="/dev/sdb", - test_cases="generic/001 generic/002", - timeout=14400, - ) - """ - # if Test group is specified, and exists in local.config, run tests. - if test_section: - self.run_async( - f"-s {test_section} -E exclude.txt {test_cases} > xfstest.log 2>&1", - sudo=True, - shell=True, - force_run=True, - cwd=self.get_xfstests_path(), - ) - # Else run generic quick test. This is not recommended. - else: - self.run_async( - f"-g generic/quick -E exclude.txt {test_cases} > xfstest.log 2>&1", - sudo=True, - shell=True, - force_run=True, - cwd=self.get_xfstests_path(), - ) - - pgrep = self.node.tools[Pgrep] - # this is the actual process name, when xfstests runs. - # monitor till process completes or timesout - try: - pgrep.wait_processes("check", timeout=timeout) - finally: - self.check_test_results( - log_path=log_path, - test_section=test_section if test_section else "generic", - result=result, - data_disk=data_disk, - ) - - def _initialize(self, *args: Any, **kwargs: Any) -> None: - super()._initialize(*args, **kwargs) - self._code_path = self.get_tool_path(use_global=True) / "xfstests-dev" - - def _install_dep(self) -> None: - """ - About: This method will install dependencies based on OS. - Dependencies are fetched from the common arrays such as - common_dep, debian_dep, fedora_dep, suse_dep, mariner_dep. - If the OS is not supported, a LisaException is raised. - """ - posix_os: Posix = cast(Posix, self.node.os) - # install dependency packages - package_list = [] - package_list.extend(self.common_dep) - if isinstance(self.node.os, Redhat): - package_list.extend(self.fedora_dep) - elif isinstance(self.node.os, Debian): - if ( - isinstance(self.node.os, Ubuntu) - and self.node.os.information.version < "18.4.0" - ): - raise UnsupportedDistroException(self.node.os) - package_list.extend(self.debian_dep) - elif isinstance(self.node.os, Suse): - package_list.extend(self.suse_dep) - elif isinstance(self.node.os, CBLMariner): - package_list.extend(self.mariner_dep) - else: - raise LisaException( - f"Current distro {self.node.os.name} doesn't support xfstests." - ) - - # if install the packages in one command, the remain available packages can't - # be installed if one of packages is not available in that distro, - # so here install it one by one - for package in list(package_list): - # to make code simple, put all packages needed by one distro in one list. 
- # the package name may be different for the different sku of the - # same distro. so, install it when the package exists in the repo. - if posix_os.is_package_in_repo(package): - posix_os.install_packages(package) - # fix compile issue on RHEL/CentOS 7.x - if ( - isinstance(self.node.os, Redhat) - and self.node.os.information.version < "8.0.0" - ): - if isinstance(self.node.os, Oracle): - posix_os.install_packages("oracle-softwarecollection-release-el7") - else: - arch = self.node.os.get_kernel_information().hardware_platform - if arch == "x86_64": - xfsprogs_version = posix_os.get_package_information("xfsprogs") - # 4.5.0-20.el7.x86_64 - version_string = ".".join(map(str, xfsprogs_version[:3])) + str( - xfsprogs_version[4] - ) - # try to install the compatible version of xfsprogs-devel with - # xfsprogs package - posix_os.install_packages(f"xfsprogs-devel-{version_string}") - # check if xfsprogs-devel is installed successfully - assert_that(posix_os.package_exists("xfsprogs-devel")).described_as( - "xfsprogs-devel is not installed successfully, please check " - "whether it is available in the repo, and the available " - "versions are compatible with xfsprogs package." - ).is_true() - - posix_os.install_packages(packages="centos-release-scl") - posix_os.install_packages( - packages="devtoolset-7-gcc*", extra_args=["--skip-broken"] - ) - self.node.execute("rm -f /bin/gcc", sudo=True, shell=True) - self.node.execute( - "ln -s /opt/rh/devtoolset-7/root/usr/bin/gcc /bin/gcc", - sudo=True, - shell=True, - ) - # fix compile issue on SLES12SP5 - if ( - isinstance(self.node.os, Suse) - and self.node.os.information.version < "15.0.0" - ): - posix_os.install_packages(packages="gcc5") - self.node.execute("rm -rf /usr/bin/gcc", sudo=True, shell=True) - self.node.execute( - "ln -s /usr/bin/gcc-5 /usr/bin/gcc", - sudo=True, - shell=True, - ) - - def _add_test_users(self) -> None: - # prerequisite for xfstesting - # these users are used in the test code - # refer https://github.com/kdave/xfstests - self.node.execute("useradd -m fsgqa", sudo=True) - self.node.execute("groupadd fsgqa", sudo=True) - self.node.execute("useradd 123456-fsgqa", sudo=True) - self.node.execute("useradd fsgqa2", sudo=True) - - def _install( - self, - branch: Optional[str] = None, - repo: Optional[str] = None, - ) -> bool: - """ - About:This method will download and install XFSTest on a given node. - Supported OS are Redhat, Debian, Suse, Ubuntu and CBLMariner3. - Dependencies are installed based on the OS type from _install_dep method. - The test users are added to the node using _add_test_users method. - This method allows you to specify custom repo and branch for xfstest. 
- Else this defaults to: - https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master - Example: - xfstest._install( - branch="master", - repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" - ) - """ - branch = branch or self.branch - repo = repo or self.repo - self._install_dep() - self._add_test_users() - tool_path = self.get_tool_path(use_global=True) - git = self.node.tools[Git] - git.clone(url=repo, cwd=tool_path, ref=branch) - make = self.node.tools[Make] - code_path = tool_path.joinpath("xfstests-dev") - - self.node.tools[Rm].remove_file(str(code_path / "src" / "splice2pipe.c")) - self.node.tools[Sed].substitute( - regexp="splice2pipe", - replacement="", - file=str(code_path / "src" / "Makefile"), - ) - - make.make_install(code_path) - return True - - def get_xfstests_path(self) -> PurePath: - return self._code_path - - def set_local_config( - self, - file_system: str, - scratch_dev: str, - scratch_mnt: str, - test_dev: str, - test_folder: str, - test_section: str = "", - mount_opts: str = "", - testfs_mount_opts: str = "", - additional_parameters: Optional[Dict[str, str]] = None, - overwrite_config: bool = False, - ) -> None: - """ - About: This method will create // append a local.config file in the install dir - local.config is used by XFStest to set global as well as testgroup options - Note:You can call this method multiple times to create multiple sections. - The code does not checks for duplicate section names, so that is the users - responsibility. - Also take note of how options are carried between sectoins, that include the - sections which are not going to be run. - Recommend going through link: - https://github.com/kdave/xfstests/blob/master/README.config-sections - for more details on how to use local.config - Parameters: - scratch_dev (str) : (M)The scratch device to be used for testing - scratch_mnt (str) : (M)The scratch mount point to be used for testing - test_dev (str) : (M)The test device to be used for testing - test_folder (str) : (M)The test folder to be used for testing - file_system (str) : (M)The filesystem type to be tested - test_section (str) : (O)The test group name to be used for testing. - Defaults to the file_system - mount_opts (str) : (O)The mount options to be used for testing. - Empty signifies disk target - testfs_mount_opts (str): (O)The test filesystem mount options to be used for - testing.Defaults to mount_opts - additional_parameters (dict): (O)Additional parameters (dict) to be used for - testing - overwrite_config (bool): (O)If True, the existing local.config file will be - overwritten - Example: - xfstest.set_local_config( - scratch_dev="/dev/sdb", - scratch_mnt="/mnt/scratch", - test_dev="/dev/sdc", - test_folder="/mnt/test", - file_system="xfs", - test_section="xfs-custom", - mount_opts="noatime", - testfs_mount_opts="noatime", - additional_parameters={"TEST_DEV2": "/dev/sdd"}, - overwrite_config=True - ) - Note: This method will by default enforce dmesg logging. - All tests will have a corresponding dmesg log file in output folder. - """ - xfstests_path = self.get_xfstests_path() - config_path = xfstests_path.joinpath("local.config") - # If overwrite is specified, remove the existing config file and start afresh - if overwrite_config and self.node.shell.exists(config_path): - self.node.shell.remove(config_path) - # If groupname is not provided, use Filesystem name. 
- # Warning !!!: if you create multiple sections, - # you must specify unique group names for each - if not test_section: - test_section = file_system - echo = self.node.tools[Echo] - # create the core config section - content = "\n".join( - [ - f"[{test_section}]", - f"FSTYP={file_system}", - f"SCRATCH_DEV={scratch_dev}", - f"SCRATCH_MNT={scratch_mnt}", - f"TEST_DEV={test_dev}", - f"TEST_DIR={test_folder}", - ] - ) - - # if Mount options are provided, append to the end of 'content' - if mount_opts: - content += f"\nMOUNT_OPTIONS='{mount_opts}'" - if testfs_mount_opts: - content += f"\nTEST_FS_MOUNT_OPTS='{testfs_mount_opts}'" - # if additional parameters are provided, append to the end of 'content' - if additional_parameters is not None: - for key, value in additional_parameters.items(): - content += f"\n{key}={value}" - # Finally enable DMESG - content += "\nKEEP_DMESG=yes" - # Append to the file if exists, else create a new file if none - echo.write_to_file(content, config_path, append=True) - - def set_excluded_tests(self, exclude_tests: str) -> None: - """ - About:This method will create an exclude.txt file with the provided test cases. - The exclude.txt file is used by XFStest to exclude specific test cases from - running. - The method takes in the following parameters: - exclude_tests: The test cases to be excluded from testing - Parameters: - exclude_tests (str): The test cases to be excluded from testing - Example Usage: - xfstest.set_excluded_tests(exclude_tests="generic/001 generic/002") - """ - if exclude_tests: - xfstests_path = self.get_xfstests_path() - exclude_file_path = xfstests_path.joinpath("exclude.txt") - if self.node.shell.exists(exclude_file_path): - self.node.shell.remove(exclude_file_path) - echo = self.node.tools[Echo] - for exclude_test in exclude_tests.split(): - echo.write_to_file(exclude_test, exclude_file_path, append=True) - - # add more usable details in subtest additional information field - def create_send_subtest_msg( - self, - test_result: "TestResult", - raw_message: str, - test_section: str, - data_disk: str, - ) -> None: - """ - About:This method is internal to LISA and is not intended for direct calls. - This method will create and send subtest results to the test result object. - Parmaeters: - test_result: The test result object to which the subtest results will be sent - raw_message: The raw message from the xfstests output - test_section: The test group name used for testing - data_disk: The data disk used for testing. 
( method is partially implemented ) - """ - all_cases_match = self.__all_cases_pattern.match(raw_message) - assert all_cases_match, "fail to find run cases from xfstests output" - all_cases = (all_cases_match.group("all_cases")).split() - not_run_cases: List[str] = [] - fail_cases: List[str] = [] - not_run_match = self.__not_run_cases_pattern.match(raw_message) - if not_run_match: - not_run_cases = (not_run_match.group("not_run_cases")).split() - fail_match = self.__fail_cases_pattern.match(raw_message) - if fail_match: - fail_cases = (fail_match.group("fail_cases")).split() - pass_cases = [ - x for x in all_cases if x not in not_run_cases and x not in fail_cases - ] - results: List[XfstestsResult] = [] - for case in fail_cases: - results.append( - XfstestsResult( - name=case, - status=TestStatus.FAILED, - message=self.extract_case_content(case, raw_message), - ) - ) - for case in pass_cases: - results.append( - XfstestsResult( - name=case, - status=TestStatus.PASSED, - message=self.extract_case_content(case, raw_message), - ) - ) - for case in not_run_cases: - results.append( - XfstestsResult( - name=case, - status=TestStatus.SKIPPED, - message=self.extract_case_content(case, raw_message), - ) - ) - for result in results: - # create test result message - info: Dict[str, Any] = {} - info["information"] = {} - if test_section: - info["information"]["test_section"] = test_section - if data_disk: - info["information"]["data_disk"] = data_disk - info["information"]["test_details"] = str( - self.create_xfstest_stack_info( - result.name, test_section, str(result.status.name) - ) - ) - send_sub_test_result_message( - test_result=test_result, - test_case_name=result.name, - test_status=result.status, - test_message=result.message, - other_fields=info, - ) - - def check_test_results( - self, - log_path: Path, - test_section: str, - result: "TestResult", - data_disk: str = "", - ) -> None: - """ - About: This method is intended to be called by run_test method only. - This method will check the xfstests output and send subtest results - to the test result object. - This method depends on create_send_subtest_msg method to send - subtest results. - Parameters: - log_path: The path where the xfstests logs will be saved - test_section: The test group name used for testing - result: The test result object to which the subtest results will be sent - data_disk: The data disk used for testing ( Method partially implemented ) - """ - xfstests_path = self.get_xfstests_path() - console_log_results_path = xfstests_path / "xfstest.log" - results_path = xfstests_path / "results/check.log" - fail_cases_list: List[str] = [] - try: - if not self.node.shell.exists(console_log_results_path): - self._log.error( - f"Console log path {console_log_results_path} doesn't exist, please" - " check testing runs well or not." - ) - raise LisaException( - f"Console log path {console_log_results_path} doesn't exist, " - "please check testing runs well or not." - ) - else: - log_result = self.node.tools[Cat].run( - str(console_log_results_path), force_run=True, sudo=True - ) - log_result.assert_exit_code() - ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") - raw_message = ansi_escape.sub("", log_result.stdout) - self.create_send_subtest_msg( - test_result=result, - raw_message=raw_message, - test_section=test_section, - data_disk=data_disk, - ) - - if not self.node.shell.exists(results_path): - self._log.error( - f"Result path {results_path} doesn't exist, please check testing" - " runs well or not." 
- ) - raise LisaException( - f"Result path {results_path} doesn't exist, please check testing" - " runs well or not." - ) - else: - results = self.node.tools[Cat].run( - str(results_path), force_run=True, sudo=True - ) - results.assert_exit_code() - pass_match = self.__all_pass_pattern.match(results.stdout) - if pass_match: - pass_count = pass_match.group("pass_count") - self._log.debug( - f"All pass in xfstests, total pass case count is {pass_count}." - ) - # Xperimental : Passedexception with message/content of XFSTestLog. - # Intent is to display XFSReport on HTML pager. - # TODO: Fix this not displaying output - raise PassedException( - f"No Failed cases found in xfstests.\n" - f"XFSTestLog: {raw_message}" - ) - - fail_match = self.__fail_pattern.match(results.stdout) - if fail_match: - assert fail_match - fail_count = fail_match.group("fail_count") - total_count = fail_match.group("total_count") - fail_cases_match = self.__fail_cases_pattern.match(results.stdout) - assert fail_cases_match - fail_info = "" - fail_cases = fail_cases_match.group("fail_cases") - for fail_case in fail_cases.split(): - fail_info += find_patterns_in_lines( - raw_message, [re.compile(f".*{fail_case}.*$", re.MULTILINE)] - )[0][0] - fail_cases_list = fail_cases.split() - raise LisaException( - f"Fail {fail_count} cases of total {total_count},\n fail cases" - f" {fail_cases},\n details: \n{fail_info}, please investigate." - ) - else: - # Mark the fail count as zero, else code will fail since we never - # fetch fail_count from regex.This variable is used in Finally block - fail_count = 0 - self._log.debug("No failed cases found in xfstests.") - finally: - self.save_xfstests_log(fail_cases_list, log_path, test_section) - results_folder = xfstests_path / "results/" - self.node.execute(f"rm -rf {results_folder}", sudo=True) - self.node.execute(f"rm -f {console_log_results_path}", sudo=True) - - def save_xfstests_log( - self, fail_cases_list: List[str], log_path: Path, test_section: str - ) -> None: - """ - About:This method is intended to be called by check_test_results method only. - This method will copy the output of XFSTest results to the Log folder of host - calling LISA. Files copied are xfsresult.log, check.log and all failed cases - files if they exist. 
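# Rough sketch of the log copy-back described above: check.log, xfstest.log and the
# per-case artifacts of failed tests are pulled into the host-side log directory.
# copy_back and exists stand in for node.shell.copy_back / node.shell.exists, and
# the paths are illustrative:
from pathlib import PurePath
from typing import Callable, List


def copy_back_artifacts(
    copy_back: Callable[[PurePath, PurePath], None],
    exists: Callable[[PurePath], bool],
    xfstests_dir: PurePath,
    log_dir: PurePath,
    test_section: str,
    failed_cases: List[str],
) -> None:
    for name in ("results/check.log", "xfstest.log"):
        src = xfstests_dir / name
        if exists(src):
            copy_back(src, log_dir / "xfstests" / PurePath(name).name)
    for case in failed_cases:
        for suffix in (".out.bad", ".full", ".dmesg"):
            src = xfstests_dir / f"results/{test_section}/{case}{suffix}"
            if exists(src):
                copy_back(src, log_dir / f"results/{test_section}/{case}{suffix}")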
- """ - # if "generic" == test_section: - # test_type = "xfs" - xfstests_path = self.get_xfstests_path() - self.node.tools[Chmod].update_folder(str(xfstests_path), "a+rwx", sudo=True) - if self.node.shell.exists(xfstests_path / "results/check.log"): - self.node.shell.copy_back( - xfstests_path / "results/check.log", - log_path / "xfstests/check.log", - ) - if self.node.shell.exists(xfstests_path / "xfstest.log"): - self.node.shell.copy_back( - xfstests_path / "xfstest.log", - log_path / "xfstests/xfstest.log", - ) - - for fail_case in fail_cases_list: - file_name = f"results/{test_section}/{fail_case}.out.bad" - result_path = xfstests_path / file_name - if self.node.shell.exists(result_path): - self.node.shell.copy_back(result_path, log_path / file_name) - else: - self._log.debug(f"{file_name} doesn't exist.") - file_name = f"results/{test_section}/{fail_case}.full" - result_path = xfstests_path / file_name - if self.node.shell.exists(result_path): - self.node.shell.copy_back(result_path, log_path / file_name) - else: - self._log.debug(f"{file_name} doesn't exist.") - file_name = f"results/{test_section}/{fail_case}.dmesg" - result_path = xfstests_path / file_name - if self.node.shell.exists(result_path): - self.node.shell.copy_back(result_path, log_path / file_name) - else: - self._log.debug(f"{file_name} doesn't exist.") - - def extract_case_content(self, case: str, raw_message: str) -> str: - """ - About:Support method to extract the content of a specific test case - from the xfstests output. Its intended for LISA use only. - The method takes in the following parameters: - case: The test case name for which the content is needed - raw_message: The raw message from the xfstests output - The method returns the content of the specific test case - Example: - xfstest.extract_case_content(case="generic/001", raw_message=raw_message) - """ - # Define the pattern to match the specific case and capture all - # content until the next / line - pattern = re.compile( - rf"({case}.*?)(?=" - r"\n[a-zA-Z]+/\d+|\nRan: |\nNot run: |\nFailures: |\nSECTION|\Z)", - re.DOTALL, - ) - # Search for the pattern in the raw_message - result = pattern.search(raw_message) - - # Extract the matched content and remove the {case} from the start - if result: - extracted_content = result.group(1) - cleaned_content = re.sub(rf"^{case}\s*", "", extracted_content) - # Remove any string in [ ] at the start of the cleaned_content - cleaned_content = re.sub(r"^\[.*?\]\s*", "", cleaned_content) - return cleaned_content.strip() - else: - return "" - - def extract_file_content(self, file_path: str) -> str: - """ - About: Support method to use the Cat command to extract file content. - This method is called by the create_xfstest_stack_info method. - Its purpose is to read the ASCII content of the file for further - tasks such as diff in case of failed cases. 
- Parameters: - file_path: The file path for which the content is needed - The method returns the content of the specific file - Example: - xfstest.extract_file_content(file_path="/path/to/file") - """ - # Use the cat tool to read the file content - if not Path(file_path).exists(): - self._log.debug(f"{file_path} doesn't exist.") - return "" - cat_tool = self.node.tools[Cat] - file_content = cat_tool.run(file_path, force_run=True) - return str(file_content.stdout) - - def create_xfstest_stack_info( - self, - case: str, - test_section: str, - test_status: str, - ) -> str: - """ - About:This method is used to look up the xfstests results directory and extract - dmesg and full/fail diff output for the given test case. - - Parameters: - case: The test case name for which the stack info is needed - test_section: The test group name used for testing - test_status: The test status for the given test case - Returns: - The method returns the stack info message for the given test case - Example: - xfstest.create_xfstest_stack_info( - case="generic/001", - test_section="xfs", - test_status="FAILED" - ) - Note: When running LISA in debug mode, expect verbose messages from 'ls' tool. - This is because the method checks for file existence per case in the results - dir. - This is normal behavior and can be ignored. We are working on reducing verbosity - of 'ls' calls to improve performance. - """ - - # Get XFSTest current path. we are looking at results/{test_type} directory here - xfstests_path = self.get_xfstests_path() - test_class = case.split("/")[0] - test_id = case.split("/")[1] - result_path = xfstests_path / f"results/{test_section}/{test_class}" - return_message: str = "" - # this needs to be fixed as it's spilling over to console output. - if self.node.tools[Ls].path_exists(str(result_path), sudo=True): - # Note. This will dump a lot of output on debug console screen. - # Only un-comment for debugging. - # self._log.debug( - # f"Found files in path {result_path} : " - # f"{self.node.tools[Ls].list(str(result_path), sudo=True)}" - # ) - # If passed, we only need DMESG output - if test_status == "PASSED": - dmesg_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True - ) - return_message = f"DMESG: {dmesg_result.stdout}" - # If failed, we need dmesg with diff output - elif test_status == "FAILED": - dmesg_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True - ) - full_out = result_path / f"{test_id}.full" - fail_out = result_path / f"{test_id}.out.bad" - # check of "full_out" and "fail_out" file exists - # Only then call diff tool. - if self.node.tools[Ls].path_exists( - str(full_out), sudo=True - ) and self.node.tools[Ls].path_exists(str(fail_out), sudo=True): - diff_result = self.node.tools[Diff].comparefiles( - src=full_out, - dest=fail_out, - ) - # else if full_out is null, return the fail_out file content. - # In some test cases, full_out is not generated due to permissions - # or other issues. However a fail file will always exists in such cases. - elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): - fail_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.out.bad", - force_run=True, - sudo=True, - ) - diff_result = fail_result.stdout - return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result}" - # return_message = f"DMESG: {dmesg_result.stdout}" - # No output is needed. 
Although we can add Dmesg in the future - elif test_status == "SKIPPED": - notrun_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.notrun", force_run=True, sudo=True - ) - return_message = f"NOTRUN: {notrun_result.stdout}" - else: - self._log.debug(f"No files found in path {result_path}") - return_message = f"No files found in path {result_path}" - self._log.debug( - f"Returning message from create_xfstest_stack_info : {return_message}" - ) - return return_message diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index e458b64304..f9ce64c399 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -17,18 +17,69 @@ search_space, simple_requirement, ) +from lisa.environment import Environment from lisa.features import Disk, Nvme from lisa.operating_system import BSD, CBLMariner, Oracle, Redhat, Windows from lisa.sut_orchestrator import AZURE, HYPERV -from lisa.sut_orchestrator.azure.features import AzureFileShare +from lisa.sut_orchestrator.azure.features import AzureFileShare, Nfs from lisa.sut_orchestrator.azure.platform_ import AzurePlatform from lisa.testsuite import TestResult from lisa.tools import Echo, FileSystem, KernelConfig, Mkfs, Mount, Parted -from lisa.util import BadEnvironmentStateException, generate_random_chars +from lisa.util import BadEnvironmentStateException, generate_random_chars, LisaException from microsoft.testsuites.xfstests.xfstests import Xfstests +# Global variables +# Section : NFS options. +_default_nfs_mount = "vers=4,minorversion=1,_netdev,nofail,sec=sys 0 0" +_default_nfs_excluded_tests: str = "" +_default_nfs_testcases: str = "" +# Section : SMB options. +_default_smb_mount = ( + "vers=3.11,dir_mode=0755,file_mode=0755,serverino,nosharesock" + ",mfsymlinks,max_channels=4,actimeo=30" +) +_default_smb_excluded_tests: str = ( + "generic/015 generic/019 generic/027 generic/034 generic/039 generic/040 " + "generic/041 generic/050 generic/056 generic/057 generic/059 generic/065 " + "generic/066 generic/067 generic/073 generic/076 generic/081 generic/083 " + "generic/090 generic/096 generic/101 generic/102 generic/104 generic/106 " + "generic/107 generic/108 generic/114 generic/204 generic/218 generic/223 " + "generic/224 generic/226 generic/250 generic/252 generic/269 generic/273 " + "generic/274 generic/275 generic/299 generic/300 generic/311 generic/312 " + "generic/320 generic/321 generic/322 generic/325 generic/335 generic/336 " + "generic/338 generic/341 generic/342 generic/343 generic/347 generic/348 " + "generic/361 generic/371 generic/376 generic/388 generic/405 generic/409 " + "generic/410 generic/411 generic/416 generic/418 generic/427 generic/441 " + "generic/442 generic/455 generic/456 generic/459 generic/466 generic/470 " + "generic/475 generic/481 generic/482 generic/483 generic/484 generic/487 " + "generic/488 generic/489 generic/500 generic/510 generic/512 generic/520 " + "generic/534 generic/535 generic/536 generic/547 generic/552 generic/557 " + "generic/558 generic/559 generic/560 generic/561 generic/562 generic/570 " + "generic/589 generic/619 generic/620 generic/640" +) +_default_smb_testcases: str = ( + "generic/001 generic/005 generic/006 generic/007 generic/010 generic/011 " + "generic/013 generic/014 generic/024 generic/028 generic/029 generic/030 " + "generic/036 generic/069 generic/070 generic/071 generic/074 generic/080 " + "generic/084 generic/086 generic/091 generic/095 generic/098 generic/100 " + "generic/109 
generic/113 generic/117 generic/124 generic/125 generic/129 " + "generic/130 generic/132 generic/133 generic/135 generic/141 generic/169 " + "generic/184 generic/198 generic/207 generic/208 generic/210 generic/211 " + "generic/212 generic/214 generic/215 generic/221 generic/228 generic/239 " + "generic/240 generic/241 generic/246 generic/247 generic/248 generic/249 " + "generic/257 generic/258 generic/286 generic/306 generic/308 generic/310 " + "generic/313 generic/315 generic/339 generic/340 generic/344 generic/345 " + "generic/346 generic/354 generic/360 generic/391 generic/393 generic/394 " + "generic/406 generic/412 generic/422 generic/428 generic/432 generic/433 " + "generic/437 generic/443 generic/450 generic/451 generic/452 generic/460 " + "generic/464 generic/465 generic/469 generic/524 generic/528 generic/538 " + "generic/565 generic/567 generic/568" +) +# Section : Global options _scratch_folder = "/mnt/scratch" _test_folder = "/mnt/test" +_xfstests_repp = "https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" +_xfstests_branch = "master" def _prepare_data_disk( @@ -55,39 +106,89 @@ def _prepare_data_disk( node.execute(f"mkdir {mount_point}", sudo=True) -def _get_smb_version(node: Node) -> str: - if node.tools[KernelConfig].is_enabled("CONFIG_CIFS_SMB311"): - version = "3.1.1" +# DEPRECATED !!! +# This does not works on newer kernels. +# We recommend SMB 3.11 when possible, and only use 3.0 for really older kernels +# def _get_smb_version(node: Node) -> str: +# if node.tools[KernelConfig].is_enabled("CONFIG_CIFS_SMB311"): +# version = "3.1.1" +# else: +# version = "3.0" +# return version + + +def _deploy_azure_file_share( + node: Node, + environment: Environment, + file_share_name: str, + scratch_name: str, + azure_file_share: Any, + allow_shared_key_access: bool = True, + enable_private_endpoint: bool = True, + storage_account_sku: str = "Standard_LRS", + storage_account_kind: str = "StorageV2", + file_share_protocol: str = "SMB", + file_share_quota_in_gb: int = 500, +) -> Dict[str, str]: + """ + About: This method will provision azure file shares on a new // existing + storage account. + Returns: Dict[str, str] - A dictionary containing the file share names + and their respective URLs. 
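# Illustrative use of the helper above, assuming a provisioned LISA node and
# environment; the share names are placeholders. The returned dict maps each share
# name to its URL, which this suite then uses as TEST_DEV / SCRATCH_DEV under the
# module-level _test_folder / _scratch_folder mount points:
def example_provision_shares(node: Node, environment: Environment) -> Dict[str, str]:
    fs_urls = _deploy_azure_file_share(
        node,
        environment,
        file_share_name="lisaexamplefs",
        scratch_name="lisaexamplescratch",
        azure_file_share=node.features[AzureFileShare],
    )
    # e.g. fs_urls["lisaexamplefs"] -> //<account>.file.core.windows.net/lisaexamplefs
    return fs_urls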
+ """ + if isinstance(azure_file_share, AzureFileShare): + file_share_protocol = "SMB" + elif isinstance(azure_file_share, Nfs): + file_share_protocol = "NFS" else: - version = "3.0" - return version - - -def _prepare_azure_file_share( - node: Node, - account_credential: Dict[str, str], - test_folders_share_dict: Dict[str, str], - fstab_info: str, -) -> None: - folder_path = node.get_pure_path("/etc/smbcredentials") - if node.shell.exists(folder_path): - node.execute(f"rm -rf {folder_path}", sudo=True) - node.shell.mkdir(folder_path) - file_path = node.get_pure_path("/etc/smbcredentials/lisa.cred") - echo = node.tools[Echo] - username = account_credential["account_name"] - password = account_credential["account_key"] - echo.write_to_file(f"username={username}", file_path, sudo=True, append=True) - echo.write_to_file(f"password={password}", file_path, sudo=True, append=True) - node.execute("cp -f /etc/fstab /etc/fstab_cifs", sudo=True) - for folder_name, share in test_folders_share_dict.items(): - node.execute(f"mkdir {folder_name}", sudo=True) - echo.write_to_file( - f"{share} {folder_name} cifs {fstab_info}", - node.get_pure_path("/etc/fstab"), - sudo=True, - append=True, - ) + raise LisaException("Unsupported file share protocol") + if file_share_protocol == "SMB": + fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( + file_share_names=[file_share_name, scratch_name], + environment=environment, + sku=storage_account_sku, + kind=storage_account_kind, + allow_shared_key_access=allow_shared_key_access, + enable_private_endpoint=enable_private_endpoint, + quota_in_gb=file_share_quota_in_gb, + ) + test_folders_share_dict = { + _test_folder: fs_url_dict[file_share_name], + _scratch_folder: fs_url_dict[scratch_name], + } + azure_file_share.create_fileshare_folders(test_folders_share_dict) + # else: + # NFS yet to be implemented + return fs_url_dict + + +# DEPRECATED !!!! +# This instead exists in features.py +# def _prepare_azure_file_share_smb( +# node: Node, +# account_credential: Dict[str, str], +# test_folders_share_dict: Dict[str, str], +# fstab_info: str, +# ) -> None: +# folder_path = node.get_pure_path("/etc/smbcredentials") +# if node.shell.exists(folder_path): +# node.execute(f"rm -rf {folder_path}", sudo=True) +# node.shell.mkdir(folder_path) +# file_path = node.get_pure_path("/etc/smbcredentials/lisa.cred") +# echo = node.tools[Echo] +# username = account_credential["account_name"] +# password = account_credential["account_key"] +# echo.write_to_file(f"username={username}", file_path, sudo=True, append=True) +# echo.write_to_file(f"password={password}", file_path, sudo=True, append=True) +# node.execute("cp -f /etc/fstab /etc/fstab_cifs", sudo=True) +# for folder_name, share in test_folders_share_dict.items(): +# node.execute(f"mkdir {folder_name}", sudo=True) +# echo.write_to_file( +# f"{share} {folder_name} cifs {fstab_info}", +# node.get_pure_path("/etc/fstab"), +# sudo=True, +# append=True, +# ) @TestSuiteMetadata( @@ -101,7 +202,7 @@ def _prepare_azure_file_share( class Xfstesting(TestSuite): # Use xfstests benchmark to test the different types of data disk, # it will run many cases, so the runtime is longer than usual case. - TIME_OUT = 14400 + TIME_OUT = 21600 # TODO: will include btrfs/244 once the kernel contains below fix. # exclude btrfs/244 temporarily for below commit not picked up by distro vendor. 
# https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/commit/fs/btrfs/volumes.c?id=e4571b8c5e9ffa1e85c0c671995bd4dcc5c75091 # noqa: E501 @@ -126,8 +227,9 @@ class Xfstesting(TestSuite): # generic/738 case might cause hang more than 4 hours on old kernel # TODO: will figure out the detailed reason of every excluded case. # exclude generic/680 for security reason. + # include generic/211 for testing excluded_tests = ( - "generic/211 generic/430 generic/431 generic/434 generic/738 xfs/438 xfs/490" + "generic/430 generic/431 generic/434 generic/738 xfs/438 xfs/490" + " btrfs/007 btrfs/178 btrfs/244 btrfs/262" + " xfs/030 xfs/032 xfs/050 xfs/052 xfs/106 xfs/107 xfs/122 xfs/132 xfs/138" + " xfs/144 xfs/148 xfs/175 xfs/191-input-validation xfs/289 xfs/293 xfs/424" @@ -258,7 +360,7 @@ def verify_xfs_standard_datadisk(self, log_path: Path, result: TestResult) -> No @TestCaseMetadata( description=""" This test case will run ext4 xfstests testing against - standard data disk with ext4 type system. + standard data disk with ext4 type system. """, requirement=simple_requirement( disk=schema.DiskOptionSettings( @@ -492,10 +594,12 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None description=""" This test case will run cifs xfstests testing against azure file share. - - Downgrading priority from 3 to 5. The file share relies on the - storage account key, which we cannot use currently. - Will change it back once file share works with MSI. + The case will provision storage account with private endpoint + and use access key // ntlmv2 for authentication. + This will change to MSI in the near future + Update the mount options via _default_smb_mount + Update the excluded cases via _default_smb_excluded_tests + Update the test cases via _default_smb_testcases """, requirement=simple_requirement( min_core_count=16, @@ -504,11 +608,17 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None ), timeout=TIME_OUT, use_new_environment=True, - priority=5, + priority=3, ) def verify_azure_file_share( self, log: Logger, log_path: Path, result: TestResult ) -> None: + """ + About: This test case will run cifs xfstests testing against + azure file share - premium . + The test will create a VM, and storage account with private endpoint. + The authentication currently uses the storage account key and NTLMv2. 
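# Minimal sketch of the cifs mount option string this case uses: the module-level
# _default_smb_mount defined above is combined with the credentials file at
# /etc/smbcredentials/lisa.cred, matching the mount_opts value built in the test
# body below:
def build_cifs_mount_opts(
    credential_file: str = "/etc/smbcredentials/lisa.cred",
) -> str:
    return f"-o {_default_smb_mount},credentials={credential_file}"


# build_cifs_mount_opts() ->
# "-o vers=3.11,dir_mode=0755,file_mode=0755,serverino,nosharesock,mfsymlinks,
#  max_channels=4,actimeo=30,credentials=/etc/smbcredentials/lisa.cred"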
+ """ environment = result.environment assert environment, "fail to get environment from testresult" assert isinstance(environment.platform, AzurePlatform) @@ -520,40 +630,41 @@ def verify_azure_file_share( xfstests = self._install_xfstests(node) azure_file_share = node.features[AzureFileShare] - version = azure_file_share.get_smb_version() - mount_opts = ( - f"-o vers={version},credentials=/etc/smbcredentials/lisa.cred" - ",dir_mode=0777,file_mode=0777,serverino" - ) - random_str = generate_random_chars(string.ascii_lowercase + string.digits, 10) file_share_name = f"lisa{random_str}fs" scratch_name = f"lisa{random_str}scratch" - - # fs_url_dict: Dict[str, str] = {file_share_name: "", scratch_name: ""} - try: - fs_url_dict = azure_file_share.create_file_share( - file_share_names=[file_share_name, scratch_name], - environment=environment, - ) - test_folders_share_dict = { - _test_folder: fs_url_dict[file_share_name], - _scratch_folder: fs_url_dict[scratch_name], - } - azure_file_share.create_fileshare_folders(test_folders_share_dict) - - self._execute_xfstests( - log_path, - xfstests, - result, - test_dev=fs_url_dict[file_share_name], - scratch_dev=fs_url_dict[scratch_name], - excluded_tests=self.excluded_tests, - mount_opts=mount_opts, - ) - finally: - # clean up resources after testing. - azure_file_share.delete_azure_fileshare([file_share_name, scratch_name]) + mount_opts = ( + f"-o {_default_smb_mount},credentials=/etc/smbcredentials/lisa.cred" + ) + fs_url_dict: Dict[str, str] = _deploy_azure_file_share( + node, + environment, + file_share_name, + scratch_name, + azure_file_share, + ) + # Create Xfstest config + xfstests.set_local_config( + scratch_dev=fs_url_dict[scratch_name], + scratch_mnt=_scratch_folder, + test_dev=fs_url_dict[file_share_name], + test_folder=_test_folder, + file_system="cifs", + test_section="cifs", + mount_opts=mount_opts, + testfs_mount_opts=mount_opts, + overwrite_config=True, + ) + # Create excluded test file + xfstests.set_excluded_tests(_default_smb_excluded_tests) + # run the test + xfstests.run_test( + test_section="cifs", + log_path=log_path, + result=result, + test_cases=_default_smb_testcases, + timeout=self.TIME_OUT - 30, + ) def after_case(self, log: Logger, **kwargs: Any) -> None: try: @@ -580,8 +691,10 @@ def _execute_xfstests( scratch_dev: str = "", file_system: FileSystem = FileSystem.xfs, test_type: str = "generic", + test_cases: str = "", excluded_tests: str = "", mount_opts: str = "", + testfs_mount_opts: str = "", ) -> None: environment = result.environment assert environment, "fail to get environment from testresult" @@ -617,18 +730,27 @@ def _execute_xfstests( ) xfstests.set_local_config( - scratch_dev, - _scratch_folder, - test_dev, - _test_folder, - test_type, - file_system.name, - mount_opts, + scratch_dev=scratch_dev, + scratch_mnt=_scratch_folder, + test_dev=test_dev, + test_folder=_test_folder, + file_system=file_system.name, + test_section=test_type, + mount_opts=mount_opts, + testfs_mount_opts=testfs_mount_opts, + overwrite_config=True, ) xfstests.set_excluded_tests(excluded_tests) # Reduce run_test timeout by 30s to let it complete before case Timeout # wait_processes interval in run_test is 10s, set to 30 for safety check - xfstests.run_test(test_type, log_path, result, data_disk, self.TIME_OUT - 30) + xfstests.run_test( + test_section=test_type, + log_path=log_path, + result=result, + data_disk=data_disk, + test_cases=test_cases, + timeout=self.TIME_OUT - 30, + ) def _install_xfstests(self, node: Node) -> Xfstests: try: diff --git 
a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 5fa396e787..d4e4cfab71 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -3,7 +3,7 @@ import re from dataclasses import dataclass from pathlib import Path, PurePath -from typing import Any, Dict, List, Type, cast +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, cast from assertpy import assert_that @@ -18,20 +18,39 @@ Suse, Ubuntu, ) -from lisa.testsuite import TestResult -from lisa.tools import Cat, Chmod, Echo, Git, Make, Pgrep, Rm, Sed -from lisa.util import LisaException, UnsupportedDistroException, find_patterns_in_lines + +if TYPE_CHECKING: + from lisa.testsuite import TestResult + +from lisa.tools import Cat, Chmod, Diff, Echo, Git, Ls, Make, Pgrep, Rm, Sed +from lisa.util import ( + LisaException, + PassedException, + UnsupportedDistroException, + find_patterns_in_lines, +) @dataclass class XfstestsResult: name: str = "" status: TestStatus = TestStatus.QUEUED + message: str = "" class Xfstests(Tool): + """ + Xfstests - Filesystem testing tool. + installed (default) from https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git + Mirrored daily from kernel.org repository. + For details, refer to https://github.com/kdave/xfstests/blob/master/README + """ + + # This is the default repo and branch for xfstests. + # Override this via _install method if needed. repo = "https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" - branch = "v2024.02.09" + branch = "master" + # these are dependencies for xfstests. Update on regular basis. common_dep = [ "acl", "attr", @@ -41,6 +60,7 @@ class Xfstests(Tool): "dos2unix", "dump", "e2fsprogs", + "e2fsprogs-devel", "gawk", "gcc", "libtool", @@ -48,12 +68,14 @@ class Xfstests(Tool): "make", "parted", "quota", + "quota-devel", "sed", "xfsdump", "xfsprogs", "indent", "python", "fio", + "dbench", ] debian_dep = [ "libacl1-dev", @@ -70,6 +92,9 @@ class Xfstests(Tool): "zlib1g-dev", "btrfs-tools", "btrfs-progs", + "libgdbm-compat-dev", + "liburing-dev", + "liburing2", ] fedora_dep = [ "libtool", @@ -85,6 +110,9 @@ class Xfstests(Tool): "btrfs-progs-devel", "llvm-ocaml-devel", "uuid-devel", + "libtool", + "e2fsprogs-devel", + "gdbm-devel", ] suse_dep = [ "btrfsprogs", @@ -115,25 +143,31 @@ class Xfstests(Tool): "psmisc", "perl-CPAN", ] + # Regular expression for parsing xfstests output + # Example: # Passed all 35 tests __all_pass_pattern = re.compile( r"([\w\W]*?)Passed all (?P\d+) tests", re.MULTILINE ) + # Example: # Failed 22 of 514 tests __fail_pattern = re.compile( r"([\w\W]*?)Failed (?P\d+) of (?P\d+) tests", re.MULTILINE, ) + # Example: # Failures: generic/079 generic/193 generic/230 generic/256 generic/314 generic/317 generic/318 generic/355 generic/382 generic/523 generic/536 generic/553 generic/554 generic/565 generic/566 generic/587 generic/594 generic/597 generic/598 generic/600 generic/603 generic/646 # noqa: E501 __fail_cases_pattern = re.compile( r"([\w\W]*?)Failures: (?P.*)", re.MULTILINE, ) + # Example: # Ran: generic/001 generic/002 generic/003 ... __all_cases_pattern = re.compile( r"([\w\W]*?)Ran: (?P.*)", re.MULTILINE, ) + # Example: # Not run: generic/110 generic/111 generic/115 ... 
__not_run_cases_pattern = re.compile( r"([\w\W]*?)Not run: (?P.*)", @@ -156,28 +190,72 @@ def dependencies(self) -> List[Type[Tool]]: def run_test( self, - test_type: str, + # test_type: str, log_path: Path, - result: TestResult, + result: "TestResult", + test_section: str, data_disk: str = "", + test_cases: str = "", timeout: int = 14400, ) -> None: - self.run_async( - f"-g {test_type}/quick -E exclude.txt > xfstest.log 2>&1", - sudo=True, - shell=True, - force_run=True, - cwd=self.get_xfstests_path(), + """About: This method runs XFSTest on a given node with the specified + test group and test cases.If test_section is not specified , test is + run with "generic/quick" classification and XFS environment variables. + If test_section is specified, test is run with the specified test group + and XFS environment variables from local.config.If test_cases is specified, + only the specified test cases are run.If empty, all test cases barring + exclude.txt entries are run.Runtime is set to 4 hours by default, + but can be overridden by the user.This method after running xfstest + will parse the output and sends subtest results to the test result object. + Parameters: + log_path: The path where the xfstests logs will be saved + result: The LISA test result object to which the subtest results will be sent + test_section: The test group name to be used for testing. + Defaults to "generic/quick" + note: if specified, test_section must exist in local.config + data_disk: The data disk used for testing + test_cases: The test cases to be run. If empty, all installed test cases + barring exclude.txt entries are run + timeout: The time in seconds after which the test will be timed out. + Defaults to 4 hours. + Example: + xfstest.run_test( + log_path=Path("/tmp/xfstests"), + result=test_result, + test_section="generic/quick", + data_disk="/dev/sdb", + test_cases="generic/001 generic/002", + timeout=14400, ) + """ + # if Test group is specified, and exists in local.config, run tests. + if test_section: + self.run_async( + f"-s {test_section} -E exclude.txt {test_cases} > xfstest.log 2>&1", + sudo=True, + shell=True, + force_run=True, + cwd=self.get_xfstests_path(), + ) + # Else run generic quick test. This is not recommended. + else: + self.run_async( + f"-g generic/quick -E exclude.txt {test_cases} > xfstest.log 2>&1", + sudo=True, + shell=True, + force_run=True, + cwd=self.get_xfstests_path(), + ) pgrep = self.node.tools[Pgrep] # this is the actual process name, when xfstests runs. + # monitor till process completes or timesout try: pgrep.wait_processes("check", timeout=timeout) finally: self.check_test_results( log_path=log_path, - test_type=test_type, + test_section=test_section if test_section else "generic", result=result, data_disk=data_disk, ) @@ -187,6 +265,12 @@ def _initialize(self, *args: Any, **kwargs: Any) -> None: self._code_path = self.get_tool_path(use_global=True) / "xfstests-dev" def _install_dep(self) -> None: + """ + About: This method will install dependencies based on OS. + Dependencies are fetched from the common arrays such as + common_dep, debian_dep, fedora_dep, suse_dep, mariner_dep. + If the OS is not supported, a LisaException is raised. 
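# Sketch of the check(1) argument strings that run_test issues (section and case
# values are examples): with a test_section the section must exist in local.config
# and is selected with -s; otherwise the generic/quick group is used with -g, and
# output is redirected to xfstest.log inside the xfstests-dev directory:
def build_check_args(test_section: str = "", test_cases: str = "") -> str:
    selector = f"-s {test_section}" if test_section else "-g generic/quick"
    return f"{selector} -E exclude.txt {test_cases}".strip() + " > xfstest.log 2>&1"


# build_check_args("cifs", "generic/001 generic/002") ->
# "-s cifs -E exclude.txt generic/001 generic/002 > xfstest.log 2>&1"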
+ """ posix_os: Posix = cast(Posix, self.node.os) # install dependency packages package_list = [] @@ -275,12 +359,32 @@ def _add_test_users(self) -> None: self.node.execute("useradd 123456-fsgqa", sudo=True) self.node.execute("useradd fsgqa2", sudo=True) - def _install(self) -> bool: + def _install( + self, + branch: Optional[str] = None, + repo: Optional[str] = None, + ) -> bool: + """ + About:This method will download and install XFSTest on a given node. + Supported OS are Redhat, Debian, Suse, Ubuntu and CBLMariner3. + Dependencies are installed based on the OS type from _install_dep method. + The test users are added to the node using _add_test_users method. + This method allows you to specify custom repo and branch for xfstest. + Else this defaults to: + https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git:master + Example: + xfstest._install( + branch="master", + repo="https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" + ) + """ + branch = branch or self.branch + repo = repo or self.repo self._install_dep() self._add_test_users() tool_path = self.get_tool_path(use_global=True) git = self.node.tools[Git] - git.clone(url=self.repo, cwd=tool_path, ref=self.branch) + git.clone(url=repo, cwd=tool_path, ref=branch) make = self.node.tools[Make] code_path = tool_path.joinpath("xfstests-dev") @@ -299,49 +403,109 @@ def get_xfstests_path(self) -> PurePath: def set_local_config( self, + file_system: str, scratch_dev: str, scratch_mnt: str, test_dev: str, test_folder: str, - test_type: str, - fs_type: str, + test_section: str = "", mount_opts: str = "", + testfs_mount_opts: str = "", + additional_parameters: Optional[Dict[str, str]] = None, + overwrite_config: bool = False, ) -> None: + """ + About: This method will create // append a local.config file in the install dir + local.config is used by XFStest to set global as well as testgroup options + Note:You can call this method multiple times to create multiple sections. + The code does not checks for duplicate section names, so that is the users + responsibility. + Also take note of how options are carried between sectoins, that include the + sections which are not going to be run. + Recommend going through link: + https://github.com/kdave/xfstests/blob/master/README.config-sections + for more details on how to use local.config + Parameters: + scratch_dev (str) : (M)The scratch device to be used for testing + scratch_mnt (str) : (M)The scratch mount point to be used for testing + test_dev (str) : (M)The test device to be used for testing + test_folder (str) : (M)The test folder to be used for testing + file_system (str) : (M)The filesystem type to be tested + test_section (str) : (O)The test group name to be used for testing. + Defaults to the file_system + mount_opts (str) : (O)The mount options to be used for testing. + Empty signifies disk target + testfs_mount_opts (str): (O)The test filesystem mount options to be used for + testing.Defaults to mount_opts + additional_parameters (dict): (O)Additional parameters (dict) to be used for + testing + overwrite_config (bool): (O)If True, the existing local.config file will be + overwritten + Example: + xfstest.set_local_config( + scratch_dev="/dev/sdb", + scratch_mnt="/mnt/scratch", + test_dev="/dev/sdc", + test_folder="/mnt/test", + file_system="xfs", + test_section="xfs-custom", + mount_opts="noatime", + testfs_mount_opts="noatime", + additional_parameters={"TEST_DEV2": "/dev/sdd"}, + overwrite_config=True + ) + Note: This method will by default enforce dmesg logging. 
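# For orientation: the kind of section set_local_config appends for a plain
# data-disk xfs run, with placeholder device paths; KEEP_DMESG=yes is always added
# by the method:
EXAMPLE_LOCAL_CONFIG_SECTION = """\
[xfs]
FSTYP=xfs
SCRATCH_DEV=/dev/sdc1
SCRATCH_MNT=/mnt/scratch
TEST_DEV=/dev/sdc2
TEST_DIR=/mnt/test
KEEP_DMESG=yes
"""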
+ All tests will have a corresponding dmesg log file in output folder. + """ xfstests_path = self.get_xfstests_path() config_path = xfstests_path.joinpath("local.config") - if self.node.shell.exists(config_path): + # If overwrite is specified, remove the existing config file and start afresh + if overwrite_config and self.node.shell.exists(config_path): self.node.shell.remove(config_path) - + # If groupname is not provided, use Filesystem name. + # Warning !!!: if you create multiple sections, + # you must specify unique group names for each + if not test_section: + test_section = file_system echo = self.node.tools[Echo] - if mount_opts: - content = "\n".join( - [ - "[cifs]", - "FSTYP=cifs", - f"TEST_FS_MOUNT_OPTS=''{mount_opts}''", - f"MOUNT_OPTIONS=''{mount_opts}''", - ] - ) - else: - content = "\n".join( - [ - f"[{test_type}]", - f"FSTYP={fs_type}", - ] - ) - echo.write_to_file(content, config_path, append=True) - + # create the core config section content = "\n".join( [ + f"[{test_section}]", + f"FSTYPE={file_system}", f"SCRATCH_DEV={scratch_dev}", f"SCRATCH_MNT={scratch_mnt}", f"TEST_DEV={test_dev}", f"TEST_DIR={test_folder}", ] ) + + # if Mount options are provided, append to the end of 'content' + if mount_opts: + content += f"\nMOUNT_OPTIONS='{mount_opts}'" + if testfs_mount_opts: + content += f"\nTEST_FS_MOUNT_OPTS='{testfs_mount_opts}'" + # if additional parameters are provided, append to the end of 'content' + if additional_parameters is not None: + for key, value in additional_parameters.items(): + content += f"\n{key}={value}" + # Finally enable DMESG + content += "\nKEEP_DMESG=yes" + # Append to the file if exists, else create a new file if none echo.write_to_file(content, config_path, append=True) def set_excluded_tests(self, exclude_tests: str) -> None: + """ + About:This method will create an exclude.txt file with the provided test cases. + The exclude.txt file is used by XFStest to exclude specific test cases from + running. + The method takes in the following parameters: + exclude_tests: The test cases to be excluded from testing + Parameters: + exclude_tests (str): The test cases to be excluded from testing + Example Usage: + xfstest.set_excluded_tests(exclude_tests="generic/001 generic/002") + """ if exclude_tests: xfstests_path = self.get_xfstests_path() exclude_file_path = xfstests_path.joinpath("exclude.txt") @@ -351,13 +515,23 @@ def set_excluded_tests(self, exclude_tests: str) -> None: for exclude_test in exclude_tests.split(): echo.write_to_file(exclude_test, exclude_file_path, append=True) + # add more usable details in subtest additional information field def create_send_subtest_msg( self, - test_result: TestResult, + test_result: "TestResult", raw_message: str, - test_type: str, + test_section: str, data_disk: str, ) -> None: + """ + About:This method is internal to LISA and is not intended for direct calls. + This method will create and send subtest results to the test result object. + Parmaeters: + test_result: The test result object to which the subtest results will be sent + raw_message: The raw message from the xfstests output + test_section: The test group name used for testing + data_disk: The data disk used for testing. 
( method is partially implemented ) + """ all_cases_match = self.__all_cases_pattern.match(raw_message) assert all_cases_match, "fail to find run cases from xfstests output" all_cases = (all_cases_match.group("all_cases")).split() @@ -374,42 +548,83 @@ def create_send_subtest_msg( ] results: List[XfstestsResult] = [] for case in fail_cases: - results.append(XfstestsResult(case, TestStatus.FAILED)) + results.append( + XfstestsResult( + name=case, + status=TestStatus.FAILED, + message=self.extract_case_content(case, raw_message), + ) + ) for case in pass_cases: - results.append(XfstestsResult(case, TestStatus.PASSED)) + results.append( + XfstestsResult( + name=case, + status=TestStatus.PASSED, + message=self.extract_case_content(case, raw_message), + ) + ) for case in not_run_cases: - results.append(XfstestsResult(case, TestStatus.SKIPPED)) + results.append( + XfstestsResult( + name=case, + status=TestStatus.SKIPPED, + message=self.extract_case_content(case, raw_message), + ) + ) for result in results: # create test result message info: Dict[str, Any] = {} info["information"] = {} - info["information"]["test_type"] = test_type - info["information"]["data_disk"] = data_disk + if test_section: + info["information"]["test_section"] = test_section + if data_disk: + info["information"]["data_disk"] = data_disk + info["information"]["test_details"] = str( + self.create_xfstest_stack_info( + result.name, test_section, str(result.status.name) + ) + ) send_sub_test_result_message( test_result=test_result, test_case_name=result.name, test_status=result.status, + test_message=result.message, other_fields=info, ) def check_test_results( self, log_path: Path, - test_type: str, - result: TestResult, + test_section: str, + result: "TestResult", data_disk: str = "", ) -> None: + """ + About: This method is intended to be called by run_test method only. + This method will check the xfstests output and send subtest results + to the test result object. + This method depends on create_send_subtest_msg method to send + subtest results. + Parameters: + log_path: The path where the xfstests logs will be saved + test_section: The test group name used for testing + result: The test result object to which the subtest results will be sent + data_disk: The data disk used for testing ( Method partially implemented ) + """ xfstests_path = self.get_xfstests_path() console_log_results_path = xfstests_path / "xfstest.log" results_path = xfstests_path / "results/check.log" fail_cases_list: List[str] = [] - try: if not self.node.shell.exists(console_log_results_path): self._log.error( f"Console log path {console_log_results_path} doesn't exist, please" " check testing runs well or not." ) + raise LisaException( + f"Console log path {console_log_results_path} doesn't exist, " + "please check testing runs well or not." + ) else: log_result = self.node.tools[Cat].run( str(console_log_results_path), force_run=True, sudo=True @@ -417,13 +632,22 @@ def check_test_results( log_result.assert_exit_code() ansi_escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])") raw_message = ansi_escape.sub("", log_result.stdout) - self.create_send_subtest_msg(result, raw_message, test_type, data_disk) + self.create_send_subtest_msg( + test_result=result, + raw_message=raw_message, + test_section=test_section, + data_disk=data_disk, + ) if not self.node.shell.exists(results_path): self._log.error( f"Result path {results_path} doesn't exist, please check testing" " runs well or not." 
) + raise LisaException( + f"Result path {results_path} doesn't exist, please check testing" + " runs well or not." + ) else: results = self.node.tools[Cat].run( str(results_path), force_run=True, sudo=True @@ -435,36 +659,54 @@ def check_test_results( self._log.debug( f"All pass in xfstests, total pass case count is {pass_count}." ) - return + # Xperimental : Passedexception with message/content of XFSTestLog. + # Intent is to display XFSReport on HTML pager. + # TODO: Fix this not displaying output + raise PassedException( + f"No Failed cases found in xfstests.\n" + f"XFSTestLog: {raw_message}" + ) fail_match = self.__fail_pattern.match(results.stdout) - assert fail_match - fail_count = fail_match.group("fail_count") - total_count = fail_match.group("total_count") - fail_cases_match = self.__fail_cases_pattern.match(results.stdout) - assert fail_cases_match - fail_info = "" - fail_cases = fail_cases_match.group("fail_cases") - for fail_case in fail_cases.split(): - fail_info += find_patterns_in_lines( - raw_message, [re.compile(f".*{fail_case}.*$", re.MULTILINE)] - )[0][0] - fail_cases_list = fail_cases.split() - raise LisaException( - f"Fail {fail_count} cases of total {total_count}, fail cases" - f" {fail_cases}, details {fail_info}, please investigate." - ) + if fail_match: + assert fail_match + fail_count = fail_match.group("fail_count") + total_count = fail_match.group("total_count") + fail_cases_match = self.__fail_cases_pattern.match(results.stdout) + assert fail_cases_match + fail_info = "" + fail_cases = fail_cases_match.group("fail_cases") + for fail_case in fail_cases.split(): + fail_info += find_patterns_in_lines( + raw_message, [re.compile(f".*{fail_case}.*$", re.MULTILINE)] + )[0][0] + fail_cases_list = fail_cases.split() + raise LisaException( + f"Fail {fail_count} cases of total {total_count},\n fail cases" + f" {fail_cases},\n details: \n{fail_info}, please investigate." + ) + else: + # Mark the fail count as zero, else code will fail since we never + # fetch fail_count from regex.This variable is used in Finally block + fail_count = 0 + self._log.debug("No failed cases found in xfstests.") finally: - self.save_xfstests_log(fail_cases_list, log_path, test_type) + self.save_xfstests_log(fail_cases_list, log_path, test_section) results_folder = xfstests_path / "results/" self.node.execute(f"rm -rf {results_folder}", sudo=True) self.node.execute(f"rm -f {console_log_results_path}", sudo=True) def save_xfstests_log( - self, fail_cases_list: List[str], log_path: Path, test_type: str + self, fail_cases_list: List[str], log_path: Path, test_section: str ) -> None: - if "generic" == test_type: - test_type = "xfs" + """ + About:This method is intended to be called by check_test_results method only. + This method will copy the output of XFSTest results to the Log folder of host + calling LISA. Files copied are xfsresult.log, check.log and all failed cases + files if they exist. 
+ """ + # if "generic" == test_section: + # test_type = "xfs" xfstests_path = self.get_xfstests_path() self.node.tools[Chmod].update_folder(str(xfstests_path), "a+rwx", sudo=True) if self.node.shell.exists(xfstests_path / "results/check.log"): @@ -479,15 +721,163 @@ def save_xfstests_log( ) for fail_case in fail_cases_list: - file_name = f"results/{test_type}/{fail_case}.out.bad" + file_name = f"results/{test_section}/{fail_case}.out.bad" + result_path = xfstests_path / file_name + if self.node.shell.exists(result_path): + self.node.shell.copy_back(result_path, log_path / file_name) + else: + self._log.debug(f"{file_name} doesn't exist.") + file_name = f"results/{test_section}/{fail_case}.full" result_path = xfstests_path / file_name if self.node.shell.exists(result_path): self.node.shell.copy_back(result_path, log_path / file_name) else: self._log.debug(f"{file_name} doesn't exist.") - file_name = f"results/{test_type}/{fail_case}.full" + file_name = f"results/{test_section}/{fail_case}.dmesg" result_path = xfstests_path / file_name if self.node.shell.exists(result_path): self.node.shell.copy_back(result_path, log_path / file_name) else: self._log.debug(f"{file_name} doesn't exist.") + + def extract_case_content(self, case: str, raw_message: str) -> str: + """ + About:Support method to extract the content of a specific test case + from the xfstests output. Its intended for LISA use only. + The method takes in the following parameters: + case: The test case name for which the content is needed + raw_message: The raw message from the xfstests output + The method returns the content of the specific test case + Example: + xfstest.extract_case_content(case="generic/001", raw_message=raw_message) + """ + # Define the pattern to match the specific case and capture all + # content until the next / line + pattern = re.compile( + rf"({case}.*?)(?=" + r"\n[a-zA-Z]+/\d+|\nRan: |\nNot run: |\nFailures: |\nSECTION|\Z)", + re.DOTALL, + ) + # Search for the pattern in the raw_message + result = pattern.search(raw_message) + + # Extract the matched content and remove the {case} from the start + if result: + extracted_content = result.group(1) + cleaned_content = re.sub(rf"^{case}\s*", "", extracted_content) + # Remove any string in [ ] at the start of the cleaned_content + cleaned_content = re.sub(r"^\[.*?\]\s*", "", cleaned_content) + return cleaned_content.strip() + else: + return "" + + def extract_file_content(self, file_path: str) -> str: + """ + About: Support method to use the Cat command to extract file content. + This method is called by the create_xfstest_stack_info method. + Its purpose is to read the ASCII content of the file for further + tasks such as diff in case of failed cases. + Parameters: + file_path: The file path for which the content is needed + The method returns the content of the specific file + Example: + xfstest.extract_file_content(file_path="/path/to/file") + """ + # Use the cat tool to read the file content + if not Path(file_path).exists(): + self._log.debug(f"{file_path} doesn't exist.") + return "" + cat_tool = self.node.tools[Cat] + file_content = cat_tool.run(file_path, force_run=True) + return str(file_content.stdout) + + def create_xfstest_stack_info( + self, + case: str, + test_section: str, + test_status: str, + ) -> str: + """ + About:This method is used to look up the xfstests results directory and extract + dmesg and full/fail diff output for the given test case. 
+ + Parameters: + case: The test case name for which the stack info is needed + test_section: The test group name used for testing + test_status: The test status for the given test case + Returns: + The method returns the stack info message for the given test case + Example: + xfstest.create_xfstest_stack_info( + case="generic/001", + test_section="xfs", + test_status="FAILED" + ) + Note: When running LISA in debug mode, expect verbose messages from 'ls' tool. + This is because the method checks for file existence per case in the results + dir. + This is normal behavior and can be ignored. We are working on reducing verbosity + of 'ls' calls to improve performance. + """ + + # Get XFSTest current path. we are looking at results/{test_type} directory here + xfstests_path = self.get_xfstests_path() + test_class = case.split("/")[0] + test_id = case.split("/")[1] + result_path = xfstests_path / f"results/{test_section}/{test_class}" + return_message: str = "" + # this needs to be fixed as it's spilling over to console output. + if self.node.tools[Ls].path_exists(str(result_path), sudo=True): + # Note. This will dump a lot of output on debug console screen. + # Only un-comment for debugging. + # self._log.debug( + # f"Found files in path {result_path} : " + # f"{self.node.tools[Ls].list(str(result_path), sudo=True)}" + # ) + # If passed, we only need DMESG output + if test_status == "PASSED": + dmesg_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True + ) + return_message = f"DMESG: {dmesg_result.stdout}" + # If failed, we need dmesg with diff output + elif test_status == "FAILED": + dmesg_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True + ) + full_out = result_path / f"{test_id}.full" + fail_out = result_path / f"{test_id}.out.bad" + # check of "full_out" and "fail_out" file exists + # Only then call diff tool. + if self.node.tools[Ls].path_exists( + str(full_out), sudo=True + ) and self.node.tools[Ls].path_exists(str(fail_out), sudo=True): + diff_result = self.node.tools[Diff].comparefiles( + src=full_out, + dest=fail_out, + ) + # else if full_out is null, return the fail_out file content. + # In some test cases, full_out is not generated due to permissions + # or other issues. However a fail file will always exists in such cases. + elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): + fail_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.out.bad", + force_run=True, + sudo=True, + ) + diff_result = fail_result.stdout + return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result}" + # return_message = f"DMESG: {dmesg_result.stdout}" + # No output is needed. 
Although we can add Dmesg in the future + elif test_status == "SKIPPED": + notrun_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.notrun", force_run=True, sudo=True + ) + return_message = f"NOTRUN: {notrun_result.stdout}" + else: + self._log.debug(f"No files found in path {result_path}") + return_message = f"No files found in path {result_path}" + self._log.debug( + f"Returning message from create_xfstest_stack_info : {return_message}" + ) + return return_message From 2a21ea9457328ee1609a4c5041944870b36c8e71 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 7 Mar 2025 22:13:05 +0530 Subject: [PATCH 11/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 25 +++++++++++---------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index f9ce64c399..07854dc72e 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -25,7 +25,7 @@ from lisa.sut_orchestrator.azure.platform_ import AzurePlatform from lisa.testsuite import TestResult from lisa.tools import Echo, FileSystem, KernelConfig, Mkfs, Mount, Parted -from lisa.util import BadEnvironmentStateException, generate_random_chars, LisaException +from lisa.util import BadEnvironmentStateException, LisaException, generate_random_chars from microsoft.testsuites.xfstests.xfstests import Xfstests # Global variables @@ -108,7 +108,8 @@ def _prepare_data_disk( # DEPRECATED !!! # This does not works on newer kernels. -# We recommend SMB 3.11 when possible, and only use 3.0 for really older kernels +# Pls see: +# https://lists.samba.org/archive/samba-technical/2018-June/128806.html # def _get_smb_version(node: Node) -> str: # if node.tools[KernelConfig].is_enabled("CONFIG_CIFS_SMB311"): # version = "3.1.1" @@ -143,15 +144,15 @@ def _deploy_azure_file_share( else: raise LisaException("Unsupported file share protocol") if file_share_protocol == "SMB": - fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( - file_share_names=[file_share_name, scratch_name], - environment=environment, - sku=storage_account_sku, - kind=storage_account_kind, - allow_shared_key_access=allow_shared_key_access, - enable_private_endpoint=enable_private_endpoint, - quota_in_gb=file_share_quota_in_gb, - ) + fs_url_dict: Dict[str, str] = azure_file_sharoe.create_file_share( + file_share_names=[file_share_name, scratcwh_name], + environment=environment, + sku=storage_account_sku, + kind=storage_account_kind, + allow_shared_key_access=allow_shared_key_access, + enable_private_endpoint=enable_private_endpoint, + quota_in_gb=file_share_quota_in_gb, + ) test_folders_share_dict = { _test_folder: fs_url_dict[file_share_name], _scratch_folder: fs_url_dict[scratch_name], @@ -163,7 +164,7 @@ def _deploy_azure_file_share( # DEPRECATED !!!! 
-# This instead exists in features.py +# This n exists in features.py # def _prepare_azure_file_share_smb( # node: Node, # account_credential: Dict[str, str], From d3310b39b075daa73a424a0aff515e909f1ce4e2 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 7 Mar 2025 22:16:01 +0530 Subject: [PATCH 12/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 07854dc72e..7c664c30e7 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -109,7 +109,7 @@ def _prepare_data_disk( # DEPRECATED !!! # This does not works on newer kernels. # Pls see: -# https://lists.samba.org/archive/samba-technical/2018-June/128806.html +# https://lists.samba.org/archive/samba-technical/2018-June/128806.html # def _get_smb_version(node: Node) -> str: # if node.tools[KernelConfig].is_enabled("CONFIG_CIFS_SMB311"): # version = "3.1.1" @@ -144,8 +144,8 @@ def _deploy_azure_file_share( else: raise LisaException("Unsupported file share protocol") if file_share_protocol == "SMB": - fs_url_dict: Dict[str, str] = azure_file_sharoe.create_file_share( - file_share_names=[file_share_name, scratcwh_name], + fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( + file_share_names=[file_share_name, scratch_name], environment=environment, sku=storage_account_sku, kind=storage_account_kind, From 4af62d6b664a144273b9fe20f68b534618f6edf6 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 7 Mar 2025 22:17:51 +0530 Subject: [PATCH 13/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 7c664c30e7..8812dd134b 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -119,7 +119,7 @@ def _prepare_data_disk( def _deploy_azure_file_share( - node: Node, + # node: Node, environment: Environment, file_share_name: str, scratch_name: str, From 4136441c7093a063a19707bcaacdee1c98bc5793 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 7 Mar 2025 22:20:02 +0530 Subject: [PATCH 14/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 1 - 1 file changed, 1 deletion(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 8812dd134b..4f84638f7c 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -119,7 +119,6 @@ def _prepare_data_disk( def _deploy_azure_file_share( - # node: Node, environment: Environment, file_share_name: str, scratch_name: str, From bd5b9d11942657a94cb855927f1c290da5778648 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Sat, 8 Mar 2025 13:36:26 +0530 Subject: [PATCH 15/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 4f84638f7c..8a4fe8f8ef 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -119,6 +119,7 @@ def _prepare_data_disk( def _deploy_azure_file_share( + node: Node, environment: Environment, 
file_share_name: str, scratch_name: str, @@ -637,11 +638,11 @@ def verify_azure_file_share( f"-o {_default_smb_mount},credentials=/etc/smbcredentials/lisa.cred" ) fs_url_dict: Dict[str, str] = _deploy_azure_file_share( - node, - environment, - file_share_name, - scratch_name, - azure_file_share, + node=node, + environment=environment, + file_share_name=file_share_name, + scratch_name=scratch_name, + azure_file_share=azure_file_share, ) # Create Xfstest config xfstests.set_local_config( From c0576f5d5c60eef8e642baad7edb168f94a50c7d Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Sat, 8 Mar 2025 13:42:08 +0530 Subject: [PATCH 16/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 24 ++++++++++----------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 8a4fe8f8ef..fb7d20063f 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -119,17 +119,17 @@ def _prepare_data_disk( def _deploy_azure_file_share( - node: Node, - environment: Environment, - file_share_name: str, - scratch_name: str, - azure_file_share: Any, - allow_shared_key_access: bool = True, - enable_private_endpoint: bool = True, - storage_account_sku: str = "Standard_LRS", - storage_account_kind: str = "StorageV2", - file_share_protocol: str = "SMB", - file_share_quota_in_gb: int = 500, + node: Node, + environment: Environment, + file_share_name: str, + scratch_name: str, + azure_file_share: Any, + allow_shared_key_access: bool = True, + enable_private_endpoint: bool = True, + storage_account_sku: str = "Standard_LRS", + storage_account_kind: str = "StorageV2", + file_share_protocol: str = "SMB", + file_share_quota_in_gb: int = 500, ) -> Dict[str, str]: """ About: This method will provision azure file shares on a new // existing @@ -159,7 +159,7 @@ def _deploy_azure_file_share( } azure_file_share.create_fileshare_folders(test_folders_share_dict) # else: - # NFS yet to be implemented + # NFS yet to be implemented return fs_url_dict From 4ca5981fadd860f45c8fd757762a4d44db10d08b Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Sat, 8 Mar 2025 18:43:22 +0530 Subject: [PATCH 17/37] Update xfstests.py --- microsoft/testsuites/xfstests/xfstests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index d4e4cfab71..b04cae1989 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -472,7 +472,7 @@ def set_local_config( content = "\n".join( [ f"[{test_section}]", - f"FSTYPE={file_system}", + f"FSTYP={file_system}", f"SCRATCH_DEV={scratch_dev}", f"SCRATCH_MNT={scratch_mnt}", f"TEST_DEV={test_dev}", From 0cee73c5aed78eb20d98cb554e01c3076310d37f Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 11 Mar 2025 08:50:43 +0530 Subject: [PATCH 18/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 59 ++++++--------------- 1 file changed, 15 insertions(+), 44 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index fb7d20063f..f68481ce28 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -73,7 +73,9 @@ "generic/406 generic/412 generic/422 generic/428 generic/432 generic/433 " "generic/437 generic/443 generic/450 generic/451 
generic/452 generic/460 " "generic/464 generic/465 generic/469 generic/524 generic/528 generic/538 " - "generic/565 generic/567 generic/568" + "generic/565 generic/567 generic/568 generic/586 generic/590 generic/591 " + "generic/598 generic/599 generic/604 generic/609 generic/615 generic/632 " + "generic/634 generic/635 generic/637 generic/638 generic/639 " ) # Section : Global options _scratch_folder = "/mnt/scratch" @@ -142,7 +144,8 @@ def _deploy_azure_file_share( elif isinstance(azure_file_share, Nfs): file_share_protocol = "NFS" else: - raise LisaException("Unsupported file share protocol") + raise LisaException(f"Unsupported file share type: {type(azure_file_share)}") + if file_share_protocol == "SMB": fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( file_share_names=[file_share_name, scratch_name], @@ -158,40 +161,12 @@ def _deploy_azure_file_share( _scratch_folder: fs_url_dict[scratch_name], } azure_file_share.create_fileshare_folders(test_folders_share_dict) - # else: - # NFS yet to be implemented + else: + # NFS yet to be implemented + raise LisaException("Skipping NFS deployment. Pending implementation.") return fs_url_dict -# DEPRECATED !!!! -# This n exists in features.py -# def _prepare_azure_file_share_smb( -# node: Node, -# account_credential: Dict[str, str], -# test_folders_share_dict: Dict[str, str], -# fstab_info: str, -# ) -> None: -# folder_path = node.get_pure_path("/etc/smbcredentials") -# if node.shell.exists(folder_path): -# node.execute(f"rm -rf {folder_path}", sudo=True) -# node.shell.mkdir(folder_path) -# file_path = node.get_pure_path("/etc/smbcredentials/lisa.cred") -# echo = node.tools[Echo] -# username = account_credential["account_name"] -# password = account_credential["account_key"] -# echo.write_to_file(f"username={username}", file_path, sudo=True, append=True) -# echo.write_to_file(f"password={password}", file_path, sudo=True, append=True) -# node.execute("cp -f /etc/fstab /etc/fstab_cifs", sudo=True) -# for folder_name, share in test_folders_share_dict.items(): -# node.execute(f"mkdir {folder_name}", sudo=True) -# echo.write_to_file( -# f"{share} {folder_name} cifs {fstab_info}", -# node.get_pure_path("/etc/fstab"), -# sudo=True, -# append=True, -# ) - - @TestSuiteMetadata( area="storage", category="community", @@ -594,13 +569,14 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None @TestCaseMetadata( description=""" This test case will run cifs xfstests testing against - azure file share. + azure file share. The case will provision storage account with private endpoint and use access key // ntlmv2 for authentication. - This will change to MSI in the near future - Update the mount options via _default_smb_mount - Update the excluded cases via _default_smb_excluded_tests - Update the test cases via _default_smb_testcases + This will be changed to MSI in the near future + To modify the test case parameters, + Update the mount options via '_default_smb_mount' + Update the excluded cases via '_default_smb_excluded_tests' + Update the test cases via '_default_smb_testcases' """, requirement=simple_requirement( min_core_count=16, @@ -614,12 +590,6 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None def verify_azure_file_share( self, log: Logger, log_path: Path, result: TestResult ) -> None: - """ - About: This test case will run cifs xfstests testing against - azure file share - premium . - The test will create a VM, and storage account with private endpoint. 
- The authentication currently uses the storage account key and NTLMv2. - """ environment = result.environment assert environment, "fail to get environment from testresult" assert isinstance(environment.platform, AzurePlatform) @@ -659,6 +629,7 @@ def verify_azure_file_share( # Create excluded test file xfstests.set_excluded_tests(_default_smb_excluded_tests) # run the test + log.info("Running xfstests against azure file share") xfstests.run_test( test_section="cifs", log_path=log_path, From 04d64bb50f7f7233d79c6a3b80560cb47d7d407c Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 11 Mar 2025 08:53:38 +0530 Subject: [PATCH 19/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index f68481ce28..7e1a5d53b8 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -573,7 +573,7 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None The case will provision storage account with private endpoint and use access key // ntlmv2 for authentication. This will be changed to MSI in the near future - To modify the test case parameters, + To modify the test case parameters: Update the mount options via '_default_smb_mount' Update the excluded cases via '_default_smb_excluded_tests' Update the test cases via '_default_smb_testcases' From aeaf07356ee687f81cb2fae85a580746c1decdec Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 11 Mar 2025 16:44:08 +0530 Subject: [PATCH 20/37] fixes and optimizations --- microsoft/testsuites/xfstests/xfstesting.py | 38 ++++--------- microsoft/testsuites/xfstests/xfstests.py | 61 +++++++-------------- 2 files changed, 32 insertions(+), 67 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 7e1a5d53b8..ec3b85b274 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -2,7 +2,7 @@ # Licensed under the MIT license. import string from pathlib import Path -from typing import Any, Dict, cast +from typing import Any, Dict, Union, cast from lisa import ( Logger, @@ -75,13 +75,11 @@ "generic/464 generic/465 generic/469 generic/524 generic/528 generic/538 " "generic/565 generic/567 generic/568 generic/586 generic/590 generic/591 " "generic/598 generic/599 generic/604 generic/609 generic/615 generic/632 " - "generic/634 generic/635 generic/637 generic/638 generic/639 " + "generic/634 generic/635 generic/637 generic/638 generic/639" ) # Section : Global options _scratch_folder = "/mnt/scratch" _test_folder = "/mnt/test" -_xfstests_repp = "https://git.kernel.org/pub/scm/fs/xfs/xfstests-dev.git" -_xfstests_branch = "master" def _prepare_data_disk( @@ -108,24 +106,12 @@ def _prepare_data_disk( node.execute(f"mkdir {mount_point}", sudo=True) -# DEPRECATED !!! -# This does not works on newer kernels. 
-# Pls see: -# https://lists.samba.org/archive/samba-technical/2018-June/128806.html -# def _get_smb_version(node: Node) -> str: -# if node.tools[KernelConfig].is_enabled("CONFIG_CIFS_SMB311"): -# version = "3.1.1" -# else: -# version = "3.0" -# return version - - def _deploy_azure_file_share( node: Node, environment: Environment, file_share_name: str, scratch_name: str, - azure_file_share: Any, + azure_file_share: Union[AzureFileShare, Nfs], allow_shared_key_access: bool = True, enable_private_endpoint: bool = True, storage_account_sku: str = "Standard_LRS", @@ -139,14 +125,14 @@ def _deploy_azure_file_share( Returns: Dict[str, str] - A dictionary containing the file share names and their respective URLs. """ - if isinstance(azure_file_share, AzureFileShare): - file_share_protocol = "SMB" - elif isinstance(azure_file_share, Nfs): - file_share_protocol = "NFS" - else: - raise LisaException(f"Unsupported file share type: {type(azure_file_share)}") + # if isinstance(azure_file_share, AzureFileShare): + # file_share_protocol = "SMB" + # elif isinstance(azure_file_share, Nfs): + # file_share_protocol = "NFS" + # else: + # raise LisaException(f"Unsupported file share type: {type(azure_file_share)}") - if file_share_protocol == "SMB": + if isinstance(azure_file_share, AzureFileShare): fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( file_share_names=[file_share_name, scratch_name], environment=environment, @@ -161,7 +147,7 @@ def _deploy_azure_file_share( _scratch_folder: fs_url_dict[scratch_name], } azure_file_share.create_fileshare_folders(test_folders_share_dict) - else: + elif isinstance(azure_file_share, Nfs): # NFS yet to be implemented raise LisaException("Skipping NFS deployment. Pending implementation.") return fs_url_dict @@ -205,7 +191,7 @@ class Xfstesting(TestSuite): # exclude generic/680 for security reason. # include generic/211 for testing excluded_tests = ( - "generic/430 generic/431 generic/434 generic/738 xfs/438 xfs/490" + "generic/211 generic/430 generic/431 generic/434 generic/738 xfs/438 xfs/490" + " btrfs/007 btrfs/178 btrfs/244 btrfs/262" + " xfs/030 xfs/032 xfs/050 xfs/052 xfs/106 xfs/107 xfs/122 xfs/132 xfs/138" + " xfs/144 xfs/148 xfs/175 xfs/191-input-validation xfs/289 xfs/293 xfs/424" diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index b04cae1989..98534c8a7e 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -190,12 +190,11 @@ def dependencies(self) -> List[Type[Tool]]: def run_test( self, - # test_type: str, log_path: Path, result: "TestResult", test_section: str, data_disk: str = "", - test_cases: str = "", + test_cases: str = " ", timeout: int = 14400, ) -> None: """About: This method runs XFSTest on a given node with the specified @@ -229,23 +228,19 @@ def run_test( ) """ # if Test group is specified, and exists in local.config, run tests. + cmd = "" if test_section: - self.run_async( - f"-s {test_section} -E exclude.txt {test_cases} > xfstest.log 2>&1", - sudo=True, - shell=True, - force_run=True, - cwd=self.get_xfstests_path(), - ) - # Else run generic quick test. This is not recommended. 
+ cmd += f"-s {test_section}" else: - self.run_async( - f"-g generic/quick -E exclude.txt {test_cases} > xfstest.log 2>&1", - sudo=True, - shell=True, - force_run=True, - cwd=self.get_xfstests_path(), - ) + cmd += "-g generic/quick" + cmd += f" -E exclude.txt {test_cases} > xfstest.log 2>&1" + self.run_async( + cmd, + sudo=True, + shell=True, + force_run=True, + cwd=self.get_xfstests_path(), + ) pgrep = self.node.tools[Pgrep] # this is the actual process name, when xfstests runs. @@ -640,10 +635,6 @@ def check_test_results( ) if not self.node.shell.exists(results_path): - self._log.error( - f"Result path {results_path} doesn't exist, please check testing" - " runs well or not." - ) raise LisaException( f"Result path {results_path} doesn't exist, please check testing" " runs well or not." @@ -659,14 +650,6 @@ def check_test_results( self._log.debug( f"All pass in xfstests, total pass case count is {pass_count}." ) - # Xperimental : Passedexception with message/content of XFSTestLog. - # Intent is to display XFSReport on HTML pager. - # TODO: Fix this not displaying output - raise PassedException( - f"No Failed cases found in xfstests.\n" - f"XFSTestLog: {raw_message}" - ) - fail_match = self.__fail_pattern.match(results.stdout) if fail_match: assert fail_match @@ -705,8 +688,6 @@ def save_xfstests_log( calling LISA. Files copied are xfsresult.log, check.log and all failed cases files if they exist. """ - # if "generic" == test_section: - # test_type = "xfs" xfstests_path = self.get_xfstests_path() self.node.tools[Chmod].update_folder(str(xfstests_path), "a+rwx", sudo=True) if self.node.shell.exists(xfstests_path / "results/check.log"): @@ -828,13 +809,6 @@ def create_xfstest_stack_info( return_message: str = "" # this needs to be fixed as it's spilling over to console output. if self.node.tools[Ls].path_exists(str(result_path), sudo=True): - # Note. This will dump a lot of output on debug console screen. - # Only un-comment for debugging. - # self._log.debug( - # f"Found files in path {result_path} : " - # f"{self.node.tools[Ls].list(str(result_path), sudo=True)}" - # ) - # If passed, we only need DMESG output if test_status == "PASSED": dmesg_result = self.node.tools[Cat].run( f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True @@ -857,15 +831,20 @@ def create_xfstest_stack_info( dest=fail_out, ) # else if full_out is null, return the fail_out file content. - # In some test cases, full_out is not generated due to permissions - # or other issues. However a fail file will always exists in such cases. elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): fail_result = self.node.tools[Cat].run( f"{result_path}/{test_id}.out.bad", force_run=True, sudo=True, ) - diff_result = fail_result.stdout + # else if fail_out is null, return the full_out file content. + elif self.node.tools[Ls].path_exists(str(full_out), sudo=True): + fail_result = self.node.tools[Cat].run( + f"{result_path}/{test_id}.full", + force_run=True, + sudo=True, + ) + diff_result = fail_result.stdout return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result}" # return_message = f"DMESG: {dmesg_result.stdout}" # No output is needed. 
Although we can add Dmesg in the future From 11fec9d44c8f897a1f78f9ca22bc9377c36756e5 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 12 Mar 2025 20:17:13 +0530 Subject: [PATCH 21/37] fix for long running disk tests --- microsoft/testsuites/xfstests/xfstesting.py | 63 ++++--- microsoft/testsuites/xfstests/xfstests.py | 176 ++++++++++++-------- 2 files changed, 142 insertions(+), 97 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index ec3b85b274..c608f1244f 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -55,7 +55,7 @@ "generic/488 generic/489 generic/500 generic/510 generic/512 generic/520 " "generic/534 generic/535 generic/536 generic/547 generic/552 generic/557 " "generic/558 generic/559 generic/560 generic/561 generic/562 generic/570 " - "generic/589 generic/619 generic/620 generic/640" + "generic/589 generic/619 generic/620 generic/640 cifs/001" ) _default_smb_testcases: str = ( "generic/001 generic/005 generic/006 generic/007 generic/010 generic/011 " @@ -125,13 +125,6 @@ def _deploy_azure_file_share( Returns: Dict[str, str] - A dictionary containing the file share names and their respective URLs. """ - # if isinstance(azure_file_share, AzureFileShare): - # file_share_protocol = "SMB" - # elif isinstance(azure_file_share, Nfs): - # file_share_protocol = "NFS" - # else: - # raise LisaException(f"Unsupported file share type: {type(azure_file_share)}") - if isinstance(azure_file_share, AzureFileShare): fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( file_share_names=[file_share_name, scratch_name], @@ -150,6 +143,8 @@ def _deploy_azure_file_share( elif isinstance(azure_file_share, Nfs): # NFS yet to be implemented raise LisaException("Skipping NFS deployment. Pending implementation.") + else: + raise LisaException(f"Unsupported file share type: {type(azure_file_share)}") return fs_url_dict @@ -189,7 +184,6 @@ class Xfstesting(TestSuite): # generic/738 case might cause hang more than 4 hours on old kernel # TODO: will figure out the detailed reason of every excluded case. # exclude generic/680 for security reason. 
- # include generic/211 for testing excluded_tests = ( "generic/211 generic/430 generic/431 generic/434 generic/738 xfs/438 xfs/490" + " btrfs/007 btrfs/178 btrfs/244 btrfs/262" @@ -315,7 +309,7 @@ def verify_xfs_standard_datadisk(self, log_path: Path, result: TestResult) -> No data_disks[0], f"{data_disks[0]}{suffix}1", f"{data_disks[0]}{suffix}2", - test_type=FileSystem.xfs.name, + test_type="quick", excluded_tests=self.excluded_tests, ) @@ -353,7 +347,7 @@ def verify_ext4_standard_datadisk(self, log_path: Path, result: TestResult) -> N f"{data_disks[0]}{suffix}1", f"{data_disks[0]}{suffix}2", file_system=FileSystem.ext4, - test_type=FileSystem.ext4.name, + test_type="quick", excluded_tests=self.excluded_tests, ) @@ -394,7 +388,7 @@ def verify_btrfs_standard_datadisk( f"{data_disks[0]}{suffix}1", f"{data_disks[0]}{suffix}2", file_system=FileSystem.btrfs, - test_type=FileSystem.btrfs.name, + test_type="quick", excluded_tests=self.excluded_tests, ) @@ -485,7 +479,7 @@ def verify_xfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None: nvme_data_disks[0], f"{nvme_data_disks[0]}p1", f"{nvme_data_disks[0]}p2", - test_type=FileSystem.xfs.name, + test_type="quick", excluded_tests=self.excluded_tests, ) @@ -516,7 +510,7 @@ def verify_ext4_nvme_datadisk(self, log_path: Path, result: TestResult) -> None: f"{nvme_data_disks[0]}p1", f"{nvme_data_disks[0]}p2", file_system=FileSystem.ext4, - test_type=FileSystem.ext4.name, + test_type="quick", excluded_tests=self.excluded_tests, ) @@ -548,7 +542,7 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None f"{nvme_data_disks[0]}p1", f"{nvme_data_disks[0]}p2", file_system=FileSystem.btrfs, - test_type=FileSystem.btrfs.name, + test_type="quick", excluded_tests=self.excluded_tests, ) @@ -558,11 +552,6 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None azure file share. The case will provision storage account with private endpoint and use access key // ntlmv2 for authentication. - This will be changed to MSI in the near future - To modify the test case parameters: - Update the mount options via '_default_smb_mount' - Update the excluded cases via '_default_smb_excluded_tests' - Update the test cases via '_default_smb_testcases' """, requirement=simple_requirement( min_core_count=16, @@ -591,7 +580,7 @@ def verify_azure_file_share( file_share_name = f"lisa{random_str}fs" scratch_name = f"lisa{random_str}scratch" mount_opts = ( - f"-o {_default_smb_mount},credentials=/etc/smbcredentials/lisa.cred" + f"-o {_default_smb_mount}, credentials=/etc/smbcredentials/lisa.cred" ) fs_url_dict: Dict[str, str] = _deploy_azure_file_share( node=node, @@ -648,7 +637,7 @@ def _execute_xfstests( test_dev: str = "", scratch_dev: str = "", file_system: FileSystem = FileSystem.xfs, - test_type: str = "generic", + test_type: str = "quick", test_cases: str = "", excluded_tests: str = "", mount_opts: str = "", @@ -658,7 +647,15 @@ def _execute_xfstests( assert environment, "fail to get environment from testresult" node = cast(RemoteNode, environment.nodes[0]) - + # test_group is a combination of /. + # supported values for test_type are quick, auto, db and more. 
+ # check tests/*/group.list in xfstests-dev directory after 'make install' + # Note: you must use correct section name from local.config when using + # test_group + # a test group for XFS will fail for a config for ext or btrfs + test_group: str = "" + if test_type: + test_group = f"{file_system.name}/{test_type}" # Fix Mariner umask for xfstests if isinstance(node.os, CBLMariner): echo = node.tools[Echo] @@ -671,11 +668,7 @@ def _execute_xfstests( # exclude this case generic/641 temporarily # it will trigger oops on RHEL8.3/8.4, VM will reboot # lack of commit 5808fecc572391867fcd929662b29c12e6d08d81 - if ( - test_type == "generic" - and isinstance(node.os, Redhat) - and node.os.information.version >= "8.3.0" - ): + if isinstance(node.os, Redhat) and node.os.information.version >= "8.3.0": excluded_tests += " generic/641" # prepare data disk when xfstesting target is data disk @@ -686,14 +679,14 @@ def _execute_xfstests( {test_dev: _test_folder, scratch_dev: _scratch_folder}, file_system=file_system, ) - + # We mark test_section as the name of the file system. xfstests.set_local_config( + file_system=file_system.name, scratch_dev=scratch_dev, scratch_mnt=_scratch_folder, test_dev=test_dev, test_folder=_test_folder, - file_system=file_system.name, - test_section=test_type, + test_section=file_system.name, mount_opts=mount_opts, testfs_mount_opts=testfs_mount_opts, overwrite_config=True, @@ -701,8 +694,14 @@ def _execute_xfstests( xfstests.set_excluded_tests(excluded_tests) # Reduce run_test timeout by 30s to let it complete before case Timeout # wait_processes interval in run_test is 10s, set to 30 for safety check + # We mark test_section as the name of the file system. + # Test group is a combination of / generated previously + # test_cases is a string of test cases separated by space, can be empty. + # If specified, it will add additional cases to the ones from test_group minus + # exclusion list. xfstests.run_test( - test_section=test_type, + test_section=file_system.name, + test_group=test_group, log_path=log_path, result=result, data_disk=data_disk, diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 98534c8a7e..a9cd97e55b 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -23,12 +23,7 @@ from lisa.testsuite import TestResult from lisa.tools import Cat, Chmod, Diff, Echo, Git, Ls, Make, Pgrep, Rm, Sed -from lisa.util import ( - LisaException, - PassedException, - UnsupportedDistroException, - find_patterns_in_lines, -) +from lisa.util import LisaException, UnsupportedDistroException, find_patterns_in_lines @dataclass @@ -193,8 +188,9 @@ def run_test( log_path: Path, result: "TestResult", test_section: str, + test_group: str = "generic/quick", data_disk: str = "", - test_cases: str = " ", + test_cases: str = "", timeout: int = 14400, ) -> None: """About: This method runs XFSTest on a given node with the specified @@ -227,13 +223,30 @@ def run_test( timeout=14400, ) """ - # if Test group is specified, and exists in local.config, run tests. + # Note : the sequence is important here. + # Do not rearrange !!!!! 
+ # Refer to xfstests-dev guide on https://github.com/kdave/xfstests + + # Test if exclude.txt exists + xfstests_path = self.get_xfstests_path() + exclude_file_path = xfstests_path.joinpath("exclude.txt") + if self.node.shell.exists(exclude_file_path): + exclude_file = True + else: + exclude_file = False cmd = "" + if test_group: + cmd += f" -g {test_group}" if test_section: - cmd += f"-s {test_section}" - else: - cmd += "-g generic/quick" - cmd += f" -E exclude.txt {test_cases} > xfstest.log 2>&1" + cmd += f" -s {test_section}" + if exclude_file: + cmd += " -E exclude.txt" + if test_cases: + cmd += f" {test_cases}" + # Finally + cmd += " > xfstest.log 2>&1" + + # run ./check command self.run_async( cmd, sudo=True, @@ -665,8 +678,8 @@ def check_test_results( )[0][0] fail_cases_list = fail_cases.split() raise LisaException( - f"Fail {fail_count} cases of total {total_count},\n fail cases" - f" {fail_cases},\n details: \n{fail_info}, please investigate." + f"Fail {fail_count} cases of total {total_count}, \n fail cases" + f" {fail_cases}, \n details: \n{fail_info}, please investigate." ) else: # Mark the fail count as zero, else code will fail since we never @@ -806,57 +819,90 @@ def create_xfstest_stack_info( test_class = case.split("/")[0] test_id = case.split("/")[1] result_path = xfstests_path / f"results/{test_section}/{test_class}" - return_message: str = "" # this needs to be fixed as it's spilling over to console output. - if self.node.tools[Ls].path_exists(str(result_path), sudo=True): - if test_status == "PASSED": - dmesg_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True - ) - return_message = f"DMESG: {dmesg_result.stdout}" - # If failed, we need dmesg with diff output - elif test_status == "FAILED": - dmesg_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.dmesg", force_run=True, sudo=True - ) - full_out = result_path / f"{test_id}.full" - fail_out = result_path / f"{test_id}.out.bad" - # check of "full_out" and "fail_out" file exists - # Only then call diff tool. - if self.node.tools[Ls].path_exists( - str(full_out), sudo=True - ) and self.node.tools[Ls].path_exists(str(fail_out), sudo=True): - diff_result = self.node.tools[Diff].comparefiles( - src=full_out, - dest=fail_out, - ) - # else if full_out is null, return the fail_out file content. - elif self.node.tools[Ls].path_exists(str(fail_out), sudo=True): - fail_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.out.bad", - force_run=True, - sudo=True, - ) - # else if fail_out is null, return the full_out file content. - elif self.node.tools[Ls].path_exists(str(full_out), sudo=True): - fail_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.full", - force_run=True, - sudo=True, - ) - diff_result = fail_result.stdout - return_message = f"DIFF: {diff_result}\n\nDMESG: {dmesg_result}" - # return_message = f"DMESG: {dmesg_result.stdout}" - # No output is needed. 
Although we can add Dmesg in the future - elif test_status == "SKIPPED": - notrun_result = self.node.tools[Cat].run( - f"{result_path}/{test_id}.notrun", force_run=True, sudo=True - ) - return_message = f"NOTRUN: {notrun_result.stdout}" - else: + ls_tool = self.node.tools[Ls] + cat_tool = self.node.tools[Cat] + + if not ls_tool.path_exists(str(result_path), sudo=True): self._log.debug(f"No files found in path {result_path}") - return_message = f"No files found in path {result_path}" - self._log.debug( - f"Returning message from create_xfstest_stack_info : {return_message}" + return f"No files found in path {result_path}" + + # Prepare file paths + # dmesg is always generated. + dmesg_file = f"{result_path}/{test_id}.dmesg" + # ideally this file is also generated on each run. but under specific cases + # it may not if the test even failed to execute + full_file = f"{result_path}/{test_id}.full" + # this file is generated only when the test fails, but not necessarily always + fail_file = f"{result_path}/{test_id}.out.bad" + # this file is generated only when the test fails, but not necessarily always + hint_file = f"{result_path}/{test_id}.hints" + # this file is generated only when the test is skipped + notrun_file = f"{result_path}/{test_id}.notrun" + + # Process based on test status + if test_status == "PASSED": + dmesg_output = "" + if ls_tool.path_exists(dmesg_file, sudo=True): + dmesg_output = cat_tool.run( + dmesg_file, force_run=True, sudo=True + ).stdout + return f"DMESG: {dmesg_output}" + return "No diagnostic information available for passed test" + elif test_status == "FAILED": + # Collect dmesg info if available + dmesg_output = "" + if ls_tool.path_exists(dmesg_file, sudo=True): + dmesg_output = cat_tool.run( + dmesg_file, force_run=True, sudo=True + ).stdout + + # Collect diff or file content + diff_output = "" + full_exists = ls_tool.path_exists(full_file, sudo=True) + fail_exists = ls_tool.path_exists(fail_file, sudo=True) + hint_exists = ls_tool.path_exists(hint_file, sudo=True) + if full_exists and fail_exists: + # Both files exist - get diff + diff_output = self.node.tools[Diff].comparefiles( + src=PurePath(full_file), dest=PurePath(fail_file) + ) + elif fail_exists: + # Only failure output exists + diff_output = cat_tool.run(fail_file, force_run=True, sudo=True).stdout + elif full_exists: + # Only full log exists + diff_output = cat_tool.run(full_file, force_run=True, sudo=True).stdout + else: + diff_output = "No diff or failure output available" + + hint_output = "" + if hint_exists: + hint_output = cat_tool.run(hint_file, force_run=True, sudo=True).stdout + + # Construct return message with available information + parts = [] + if diff_output: + parts.append(f"DIFF: {diff_output}") + if dmesg_output: + parts.append(f"DMESG: {dmesg_output}") + if hint_output: + parts.append(f"HINT: {hint_output}") + + return ( + "\n\n".join(parts) if parts else "No diagnostic information available" + ) + + elif test_status == "SKIPPED": + if ls_tool.path_exists(notrun_file, sudo=True): + notrun_output = cat_tool.run( + notrun_file, force_run=True, sudo=True + ).stdout + return f"NOTRUN: {notrun_output}" + return "No notrun information available" + + # If we get here, no relevant files were found for the given test status + return ( + f"No relevant output files found for test case {case} " + f"with status {test_status}" ) - return return_message From 141a98567a5dcc1e0538381317786e8918d98929 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 12 Mar 2025 22:54:14 +0530 
Subject: [PATCH 22/37] syntax fix for CIFS, forcing test group for all other cases --- microsoft/testsuites/xfstests/xfstesting.py | 24 +++++++++++++-------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index c608f1244f..ab8aa90528 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -309,7 +309,7 @@ def verify_xfs_standard_datadisk(self, log_path: Path, result: TestResult) -> No data_disks[0], f"{data_disks[0]}{suffix}1", f"{data_disks[0]}{suffix}2", - test_type="quick", + test_type=f"{FileSystem.xfs.name}/quick", excluded_tests=self.excluded_tests, ) @@ -347,7 +347,7 @@ def verify_ext4_standard_datadisk(self, log_path: Path, result: TestResult) -> N f"{data_disks[0]}{suffix}1", f"{data_disks[0]}{suffix}2", file_system=FileSystem.ext4, - test_type="quick", + test_type=f"{FileSystem.ext4.name}/quick", excluded_tests=self.excluded_tests, ) @@ -388,7 +388,7 @@ def verify_btrfs_standard_datadisk( f"{data_disks[0]}{suffix}1", f"{data_disks[0]}{suffix}2", file_system=FileSystem.btrfs, - test_type="quick", + test_type=f"{FileSystem.btrfs.name}/quick", excluded_tests=self.excluded_tests, ) @@ -479,7 +479,7 @@ def verify_xfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None: nvme_data_disks[0], f"{nvme_data_disks[0]}p1", f"{nvme_data_disks[0]}p2", - test_type="quick", + test_type=f"{FileSystem.xfs.name}/quick", excluded_tests=self.excluded_tests, ) @@ -510,7 +510,7 @@ def verify_ext4_nvme_datadisk(self, log_path: Path, result: TestResult) -> None: f"{nvme_data_disks[0]}p1", f"{nvme_data_disks[0]}p2", file_system=FileSystem.ext4, - test_type="quick", + test_type=f"{FileSystem.ext4.name}/quick", excluded_tests=self.excluded_tests, ) @@ -542,7 +542,7 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None f"{nvme_data_disks[0]}p1", f"{nvme_data_disks[0]}p2", file_system=FileSystem.btrfs, - test_type="quick", + test_type=f"{FileSystem.btrfs.name}/quick", excluded_tests=self.excluded_tests, ) @@ -580,7 +580,8 @@ def verify_azure_file_share( file_share_name = f"lisa{random_str}fs" scratch_name = f"lisa{random_str}scratch" mount_opts = ( - f"-o {_default_smb_mount}, credentials=/etc/smbcredentials/lisa.cred" + f"-o {_default_smb_mount}," # noqa: E231 + f"credentials=/etc/smbcredentials/lisa.cred" # noqa: E231 ) fs_url_dict: Dict[str, str] = _deploy_azure_file_share( node=node, @@ -607,6 +608,7 @@ def verify_azure_file_share( log.info("Running xfstests against azure file share") xfstests.run_test( test_section="cifs", + test_group="cifs/quick", log_path=log_path, result=result, test_cases=_default_smb_testcases, @@ -637,7 +639,7 @@ def _execute_xfstests( test_dev: str = "", scratch_dev: str = "", file_system: FileSystem = FileSystem.xfs, - test_type: str = "quick", + test_type: str = "generic/quick", test_cases: str = "", excluded_tests: str = "", mount_opts: str = "", @@ -654,8 +656,12 @@ def _execute_xfstests( # test_group # a test group for XFS will fail for a config for ext or btrfs test_group: str = "" - if test_type: + if not test_type: test_group = f"{file_system.name}/{test_type}" + if test_type == "generic": + test_group = "generic/quick" + else: + test_group = test_type # Fix Mariner umask for xfstests if isinstance(node.os, CBLMariner): echo = node.tools[Echo] From 5ed62f4115aecd3c799602866c3e3e3cd16fc2c6 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 12 Mar 2025 23:02:55 
+0530 Subject: [PATCH 23/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index ab8aa90528..f043ea5150 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -656,9 +656,9 @@ def _execute_xfstests( # test_group # a test group for XFS will fail for a config for ext or btrfs test_group: str = "" - if not test_type: - test_group = f"{file_system.name}/{test_type}" - if test_type == "generic": + if not test_type or test_type == file_system.name: + test_group = f"{file_system.name}/quick" + elif test_type == "generic": test_group = "generic/quick" else: test_group = test_type From f14cbe8676557690b05fdc3bf4c0144cfa83f21f Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Thu, 13 Mar 2025 21:04:15 +0530 Subject: [PATCH 24/37] performance fixes for diagnostics message generation --- microsoft/testsuites/xfstests/xfstesting.py | 3 -- microsoft/testsuites/xfstests/xfstests.py | 58 +++++++++++---------- 2 files changed, 30 insertions(+), 31 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index f043ea5150..6293e074f5 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -116,7 +116,6 @@ def _deploy_azure_file_share( enable_private_endpoint: bool = True, storage_account_sku: str = "Standard_LRS", storage_account_kind: str = "StorageV2", - file_share_protocol: str = "SMB", file_share_quota_in_gb: int = 500, ) -> Dict[str, str]: """ @@ -658,8 +657,6 @@ def _execute_xfstests( test_group: str = "" if not test_type or test_type == file_system.name: test_group = f"{file_system.name}/quick" - elif test_type == "generic": - test_group = "generic/quick" else: test_group = test_type # Fix Mariner umask for xfstests diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index a9cd97e55b..9d3c1c9b14 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -22,7 +22,7 @@ if TYPE_CHECKING: from lisa.testsuite import TestResult -from lisa.tools import Cat, Chmod, Diff, Echo, Git, Ls, Make, Pgrep, Rm, Sed +from lisa.tools import Cat, Chmod, Diff, Echo, Git, Make, Pgrep, Rm, Sed from lisa.util import LisaException, UnsupportedDistroException, find_patterns_in_lines @@ -625,10 +625,6 @@ def check_test_results( fail_cases_list: List[str] = [] try: if not self.node.shell.exists(console_log_results_path): - self._log.error( - f"Console log path {console_log_results_path} doesn't exist, please" - " check testing runs well or not." - ) raise LisaException( f"Console log path {console_log_results_path} doesn't exist, " "please check testing runs well or not." @@ -665,7 +661,6 @@ def check_test_results( ) fail_match = self.__fail_pattern.match(results.stdout) if fail_match: - assert fail_match fail_count = fail_match.group("fail_count") total_count = fail_match.group("total_count") fail_cases_match = self.__fail_cases_pattern.match(results.stdout) @@ -678,8 +673,9 @@ def check_test_results( )[0][0] fail_cases_list = fail_cases.split() raise LisaException( - f"Fail {fail_count} cases of total {total_count}, \n fail cases" - f" {fail_cases}, \n details: \n{fail_info}, please investigate." 
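For readers following the PATCH 22/23 churn above, the net effect of the test_group resolution in _execute_xfstests can be summarised with a small standalone sketch. This is not the LISA code itself; the FileSystem enum below is only a stand-in for the one xfstesting.py imports.

# Hedged sketch (assumptions: FileSystem mirrors the xfs/ext4/btrfs members
# used in xfstesting.py). Shows how a caller-supplied test_type is expected to
# resolve into an xfstests "-g" group after PATCH 23.
from enum import Enum


class FileSystem(Enum):
    xfs = 1
    ext4 = 2
    btrfs = 3


def resolve_test_group(test_type: str, file_system: FileSystem) -> str:
    # An empty test_type, or a bare file system name, falls back to the
    # "<fs>/quick" group; anything else (e.g. "generic/quick", "xfs/auto")
    # is passed through unchanged.
    if not test_type or test_type == file_system.name:
        return f"{file_system.name}/quick"
    return test_type


assert resolve_test_group("", FileSystem.xfs) == "xfs/quick"
assert resolve_test_group("ext4", FileSystem.ext4) == "ext4/quick"
assert resolve_test_group("generic/quick", FileSystem.btrfs) == "generic/quick"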
+ f"Fail {fail_count} cases of total {total_count}, " + f"\n\nfail cases: {fail_cases}, " + f"\n\ndetails: \n\n{fail_info}, \n\nplease investigate." ) else: # Mark the fail count as zero, else code will fail since we never @@ -820,65 +816,71 @@ def create_xfstest_stack_info( test_id = case.split("/")[1] result_path = xfstests_path / f"results/{test_section}/{test_class}" # this needs to be fixed as it's spilling over to console output. - ls_tool = self.node.tools[Ls] + # ls_tool = self.node.tools[Ls] cat_tool = self.node.tools[Cat] - if not ls_tool.path_exists(str(result_path), sudo=True): + if not self.node.shell.exists(result_path): self._log.debug(f"No files found in path {result_path}") return f"No files found in path {result_path}" # Prepare file paths # dmesg is always generated. - dmesg_file = f"{result_path}/{test_id}.dmesg" + dmesg_file = result_path / f"{test_id}.dmesg" # ideally this file is also generated on each run. but under specific cases # it may not if the test even failed to execute - full_file = f"{result_path}/{test_id}.full" + full_file = result_path / f"{test_id}.full" # this file is generated only when the test fails, but not necessarily always - fail_file = f"{result_path}/{test_id}.out.bad" + fail_file = result_path / f"{test_id}.out.bad" # this file is generated only when the test fails, but not necessarily always - hint_file = f"{result_path}/{test_id}.hints" + hint_file = result_path / f"{test_id}.hints" # this file is generated only when the test is skipped - notrun_file = f"{result_path}/{test_id}.notrun" + notrun_file = result_path / f"{test_id}.notrun" # Process based on test status if test_status == "PASSED": dmesg_output = "" - if ls_tool.path_exists(dmesg_file, sudo=True): + if self.node.shell.exists(dmesg_file): dmesg_output = cat_tool.run( - dmesg_file, force_run=True, sudo=True + str(dmesg_file), force_run=True, sudo=True ).stdout return f"DMESG: {dmesg_output}" return "No diagnostic information available for passed test" elif test_status == "FAILED": # Collect dmesg info if available dmesg_output = "" - if ls_tool.path_exists(dmesg_file, sudo=True): + if self.node.shell.exists(dmesg_file): dmesg_output = cat_tool.run( - dmesg_file, force_run=True, sudo=True + str(dmesg_file), force_run=True, sudo=True ).stdout # Collect diff or file content diff_output = "" - full_exists = ls_tool.path_exists(full_file, sudo=True) - fail_exists = ls_tool.path_exists(fail_file, sudo=True) - hint_exists = ls_tool.path_exists(hint_file, sudo=True) + full_exists = self.node.shell.exists(full_file) + fail_exists = self.node.shell.exists(fail_file) + hint_exists = self.node.shell.exists(hint_file) if full_exists and fail_exists: # Both files exist - get diff diff_output = self.node.tools[Diff].comparefiles( - src=PurePath(full_file), dest=PurePath(fail_file) + src=full_file, dest=fail_file ) elif fail_exists: # Only failure output exists - diff_output = cat_tool.run(fail_file, force_run=True, sudo=True).stdout + diff_output = cat_tool.run( + str(fail_file), force_run=True, sudo=True + ).stdout elif full_exists: # Only full log exists - diff_output = cat_tool.run(full_file, force_run=True, sudo=True).stdout + diff_output = cat_tool.run( + str(full_file), force_run=True, sudo=True + ).stdout else: diff_output = "No diff or failure output available" hint_output = "" if hint_exists: - hint_output = cat_tool.run(hint_file, force_run=True, sudo=True).stdout + hint_output = cat_tool.run( + str(hint_file), force_run=True, sudo=True + ).stdout # Construct return message with 
available information parts = [] @@ -894,9 +896,9 @@ def create_xfstest_stack_info( ) elif test_status == "SKIPPED": - if ls_tool.path_exists(notrun_file, sudo=True): + if self.node.shell.exists(notrun_file): notrun_output = cat_tool.run( - notrun_file, force_run=True, sudo=True + str(notrun_file), force_run=True, sudo=True ).stdout return f"NOTRUN: {notrun_output}" return "No notrun information available" From 41eb6fb788499a6fee8ec4e8b7e524386b6fa48f Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Thu, 13 Mar 2025 23:54:18 +0530 Subject: [PATCH 25/37] move generic 586 for Azfiles to excluded list --- microsoft/testsuites/xfstests/xfstesting.py | 8 ++++---- microsoft/testsuites/xfstests/xfstests.py | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 6293e074f5..a27aad652d 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -55,7 +55,7 @@ "generic/488 generic/489 generic/500 generic/510 generic/512 generic/520 " "generic/534 generic/535 generic/536 generic/547 generic/552 generic/557 " "generic/558 generic/559 generic/560 generic/561 generic/562 generic/570 " - "generic/589 generic/619 generic/620 generic/640 cifs/001" + "generic/586 generic/589 generic/619 generic/620 generic/640 cifs/001" ) _default_smb_testcases: str = ( "generic/001 generic/005 generic/006 generic/007 generic/010 generic/011 " @@ -73,9 +73,9 @@ "generic/406 generic/412 generic/422 generic/428 generic/432 generic/433 " "generic/437 generic/443 generic/450 generic/451 generic/452 generic/460 " "generic/464 generic/465 generic/469 generic/524 generic/528 generic/538 " - "generic/565 generic/567 generic/568 generic/586 generic/590 generic/591 " - "generic/598 generic/599 generic/604 generic/609 generic/615 generic/632 " - "generic/634 generic/635 generic/637 generic/638 generic/639" + "generic/565 generic/567 generic/568 generic/590 generic/591 generic/598" + "generic/599 generic/604 generic/609 generic/615 generic/632 generic/634" + "generic/635 generic/637 generic/638 generic/639" ) # Section : Global options _scratch_folder = "/mnt/scratch" diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 9d3c1c9b14..15f3e723e0 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -821,6 +821,7 @@ def create_xfstest_stack_info( if not self.node.shell.exists(result_path): self._log.debug(f"No files found in path {result_path}") + # non terminating error !!! 
return f"No files found in path {result_path}" # Prepare file paths From 6360ad78430bf87d88f0b8fab9a04372e006275f Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 14 Mar 2025 11:46:21 +0530 Subject: [PATCH 26/37] reducing timeout back to 4 hours reducing timeout to 4 hours as original since we no longer are seeing issues with quick tests running > 2 hours with new XFStests code --- microsoft/testsuites/xfstests/xfstesting.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index a27aad652d..6ec3400055 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -158,7 +158,7 @@ def _deploy_azure_file_share( class Xfstesting(TestSuite): # Use xfstests benchmark to test the different types of data disk, # it will run many cases, so the runtime is longer than usual case. - TIME_OUT = 21600 + TIME_OUT = 14400 # 4 hours # TODO: will include btrfs/244 once the kernel contains below fix. # exclude btrfs/244 temporarily for below commit not picked up by distro vendor. # https://git.kernel.org/pub/scm/linux/kernel/git/next/linux-next.git/commit/fs/btrfs/volumes.c?id=e4571b8c5e9ffa1e85c0c671995bd4dcc5c75091 # noqa: E501 From 31949fee33c9376d52fb531cdd1aba79de87d62d Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Sun, 16 Mar 2025 19:39:37 +0530 Subject: [PATCH 27/37] fixed issue of >6 return statements in create_xfstest_stack_info() --- microsoft/testsuites/xfstests/xfstests.py | 179 +++++++++++----------- 1 file changed, 93 insertions(+), 86 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 15f3e723e0..f9bc38543a 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -818,94 +818,101 @@ def create_xfstest_stack_info( # this needs to be fixed as it's spilling over to console output. # ls_tool = self.node.tools[Ls] cat_tool = self.node.tools[Cat] - + result = "" if not self.node.shell.exists(result_path): self._log.debug(f"No files found in path {result_path}") # non terminating error !!! - return f"No files found in path {result_path}" - - # Prepare file paths - # dmesg is always generated. - dmesg_file = result_path / f"{test_id}.dmesg" - # ideally this file is also generated on each run. 
but under specific cases - # it may not if the test even failed to execute - full_file = result_path / f"{test_id}.full" - # this file is generated only when the test fails, but not necessarily always - fail_file = result_path / f"{test_id}.out.bad" - # this file is generated only when the test fails, but not necessarily always - hint_file = result_path / f"{test_id}.hints" - # this file is generated only when the test is skipped - notrun_file = result_path / f"{test_id}.notrun" - - # Process based on test status - if test_status == "PASSED": - dmesg_output = "" - if self.node.shell.exists(dmesg_file): - dmesg_output = cat_tool.run( - str(dmesg_file), force_run=True, sudo=True - ).stdout - return f"DMESG: {dmesg_output}" - return "No diagnostic information available for passed test" - elif test_status == "FAILED": - # Collect dmesg info if available - dmesg_output = "" - if self.node.shell.exists(dmesg_file): - dmesg_output = cat_tool.run( - str(dmesg_file), force_run=True, sudo=True - ).stdout - - # Collect diff or file content - diff_output = "" - full_exists = self.node.shell.exists(full_file) - fail_exists = self.node.shell.exists(fail_file) - hint_exists = self.node.shell.exists(hint_file) - if full_exists and fail_exists: - # Both files exist - get diff - diff_output = self.node.tools[Diff].comparefiles( - src=full_file, dest=fail_file + result = f"No files found in path {result_path}" + else: + # Prepare file paths + # dmesg is always generated. + dmesg_file = result_path / f"{test_id}.dmesg" + # ideally this file is also generated on each run. but under specific cases + # it may not if the test even failed to execute + full_file = result_path / f"{test_id}.full" + # this file is generated only when the test fails, but not necessarily + # always + fail_file = result_path / f"{test_id}.out.bad" + # this file is generated only when the test fails, but not necessarily + # always + hint_file = result_path / f"{test_id}.hints" + # this file is generated only when the test is skipped + notrun_file = result_path / f"{test_id}.notrun" + + # Process based on test status + if test_status == "PASSED": + dmesg_output = "" + if self.node.shell.exists(dmesg_file): + dmesg_output = cat_tool.run( + str(dmesg_file), force_run=True, sudo=True + ).stdout + result = f"DMESG: {dmesg_output}" + else: + result = "No diagnostic information available for passed test" + elif test_status == "FAILED": + # Collect dmesg info if available + dmesg_output = "" + if self.node.shell.exists(dmesg_file): + dmesg_output = cat_tool.run( + str(dmesg_file), force_run=True, sudo=True + ).stdout + + # Collect diff or file content + diff_output = "" + full_exists = self.node.shell.exists(full_file) + fail_exists = self.node.shell.exists(fail_file) + hint_exists = self.node.shell.exists(hint_file) + if full_exists and fail_exists: + # Both files exist - get diff + diff_output = self.node.tools[Diff].comparefiles( + src=full_file, dest=fail_file + ) + elif fail_exists: + # Only failure output exists + diff_output = cat_tool.run( + str(fail_file), force_run=True, sudo=True + ).stdout + elif full_exists: + # Only full log exists + diff_output = cat_tool.run( + str(full_file), force_run=True, sudo=True + ).stdout + else: + diff_output = "No diff or failure output available" + + hint_output = "" + if hint_exists: + hint_output = cat_tool.run( + str(hint_file), force_run=True, sudo=True + ).stdout + + # Construct return message with available information + parts = [] + if diff_output: + parts.append(f"DIFF: {diff_output}") + if 
dmesg_output: + parts.append(f"DMESG: {dmesg_output}") + if hint_output: + parts.append(f"HINT: {hint_output}") + + result = ( + "\n\n".join(parts) + if parts + else "No diagnostic information available" ) - elif fail_exists: - # Only failure output exists - diff_output = cat_tool.run( - str(fail_file), force_run=True, sudo=True - ).stdout - elif full_exists: - # Only full log exists - diff_output = cat_tool.run( - str(full_file), force_run=True, sudo=True - ).stdout - else: - diff_output = "No diff or failure output available" - - hint_output = "" - if hint_exists: - hint_output = cat_tool.run( - str(hint_file), force_run=True, sudo=True - ).stdout - - # Construct return message with available information - parts = [] - if diff_output: - parts.append(f"DIFF: {diff_output}") - if dmesg_output: - parts.append(f"DMESG: {dmesg_output}") - if hint_output: - parts.append(f"HINT: {hint_output}") - - return ( - "\n\n".join(parts) if parts else "No diagnostic information available" - ) - elif test_status == "SKIPPED": - if self.node.shell.exists(notrun_file): - notrun_output = cat_tool.run( - str(notrun_file), force_run=True, sudo=True - ).stdout - return f"NOTRUN: {notrun_output}" - return "No notrun information available" - - # If we get here, no relevant files were found for the given test status - return ( - f"No relevant output files found for test case {case} " - f"with status {test_status}" - ) + elif test_status == "SKIPPED": + if self.node.shell.exists(notrun_file): + notrun_output = cat_tool.run( + str(notrun_file), force_run=True, sudo=True + ).stdout + result = f"NOTRUN: {notrun_output}" + else: + result = "No notrun information available" + else: + # If we get here, no relevant files were found for the given test status + result = ( + f"No relevant output files found for test case {case} " + f"with status {test_status}" + ) + return result From 755039ccc2b7ece260ba942be2cd476d8b235347 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 18 Mar 2025 15:07:38 +0530 Subject: [PATCH 28/37] remove type_checking for xfstests.py tool --- microsoft/testsuites/xfstests/xfstests.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index f9bc38543a..3c0cc868a0 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -3,7 +3,7 @@ import re from dataclasses import dataclass from pathlib import Path, PurePath -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Type, cast +from typing import Any, Dict, List, Optional, Type, cast from assertpy import assert_that @@ -18,10 +18,7 @@ Suse, Ubuntu, ) - -if TYPE_CHECKING: - from lisa.testsuite import TestResult - +from lisa.testsuite import TestResult from lisa.tools import Cat, Chmod, Diff, Echo, Git, Make, Pgrep, Rm, Sed from lisa.util import LisaException, UnsupportedDistroException, find_patterns_in_lines From 4a4ab64a898b880278c560df4b5b43f5ffc30928 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 18 Mar 2025 20:35:07 +0530 Subject: [PATCH 29/37] add note for device ID for mount points --- microsoft/testsuites/xfstests/xfstests.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 3c0cc868a0..6f336357f0 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -460,6 +460,11 @@ def set_local_config( 
overwrite_config=True ) Note: This method will by default enforce dmesg logging. + Note2: Its imperitive that disk labels are set correctly for the tests + to run. + We highly advise to fetch the labels at runtime and not hardcode them. + _prepare_data_disk() method in xfstesting.py is a good example of this. + Note3: The test folder should be created before running the tests. All tests will have a corresponding dmesg log file in output folder. """ xfstests_path = self.get_xfstests_path() From 40412e7f713464256b2af84599d0e768b48d8a97 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 19 Mar 2025 11:39:31 +0530 Subject: [PATCH 30/37] Flake8 fix and AZfiles test priority change --- microsoft/testsuites/xfstests/xfstesting.py | 2 +- microsoft/testsuites/xfstests/xfstests.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 6ec3400055..4f1e9effe2 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -559,7 +559,7 @@ def verify_btrfs_nvme_datadisk(self, log_path: Path, result: TestResult) -> None ), timeout=TIME_OUT, use_new_environment=True, - priority=3, + priority=5, ) def verify_azure_file_share( self, log: Logger, log_path: Path, result: TestResult diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 6f336357f0..a82adaca9e 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -460,7 +460,7 @@ def set_local_config( overwrite_config=True ) Note: This method will by default enforce dmesg logging. - Note2: Its imperitive that disk labels are set correctly for the tests + Note2: Its imperitive that disk labels are set correctly for the tests to run. We highly advise to fetch the labels at runtime and not hardcode them. _prepare_data_disk() method in xfstesting.py is a good example of this. From e12eed1bf692949b6143ff2307bcb1ade5e79145 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Thu, 20 Mar 2025 20:10:40 +0530 Subject: [PATCH 31/37] fix for Azure files SMB multi channel + share def changes to accept list of share names --- microsoft/testsuites/xfstests/xfstesting.py | 27 ++++++------ microsoft/testsuites/xfstests/xfstests.py | 47 ++++++++++++--------- 2 files changed, 42 insertions(+), 32 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 4f1e9effe2..43d93a5a0b 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -2,7 +2,7 @@ # Licensed under the MIT license. import string from pathlib import Path -from typing import Any, Dict, Union, cast +from typing import Any, Dict, List, Union, cast from lisa import ( Logger, @@ -106,17 +106,19 @@ def _prepare_data_disk( node.execute(f"mkdir {mount_point}", sudo=True) +# Updates as of march 2025. +# Default premium SKU will be used for file share creation. 
+# This will ensure SMB multi channel is enabled by default def _deploy_azure_file_share( node: Node, environment: Environment, - file_share_name: str, - scratch_name: str, + names: List[str], azure_file_share: Union[AzureFileShare, Nfs], allow_shared_key_access: bool = True, enable_private_endpoint: bool = True, - storage_account_sku: str = "Standard_LRS", - storage_account_kind: str = "StorageV2", - file_share_quota_in_gb: int = 500, + storage_account_sku: str = "Premium_LRS", + storage_account_kind: str = "FileStorage", + file_share_quota_in_gb: int = 100, ) -> Dict[str, str]: """ About: This method will provision azure file shares on a new // existing @@ -126,7 +128,7 @@ def _deploy_azure_file_share( """ if isinstance(azure_file_share, AzureFileShare): fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( - file_share_names=[file_share_name, scratch_name], + file_share_names=names, environment=environment, sku=storage_account_sku, kind=storage_account_kind, @@ -135,13 +137,13 @@ def _deploy_azure_file_share( quota_in_gb=file_share_quota_in_gb, ) test_folders_share_dict = { - _test_folder: fs_url_dict[file_share_name], - _scratch_folder: fs_url_dict[scratch_name], + _test_folder: fs_url_dict[names[0]], + _scratch_folder: fs_url_dict[names[1]], } azure_file_share.create_fileshare_folders(test_folders_share_dict) elif isinstance(azure_file_share, Nfs): # NFS yet to be implemented - raise LisaException("Skipping NFS deployment. Pending implementation.") + raise SkippedException("Skipping NFS deployment. Pending implementation.") else: raise LisaException(f"Unsupported file share type: {type(azure_file_share)}") return fs_url_dict @@ -585,8 +587,9 @@ def verify_azure_file_share( fs_url_dict: Dict[str, str] = _deploy_azure_file_share( node=node, environment=environment, - file_share_name=file_share_name, - scratch_name=scratch_name, + # file_share_name=file_share_name, + # scratch_name=scratch_name, + names=[file_share_name, scratch_name], azure_file_share=azure_file_share, ) # Create Xfstest config diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index a82adaca9e..65075298d9 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -184,38 +184,45 @@ def run_test( self, log_path: Path, result: "TestResult", - test_section: str, + test_section: str = "", test_group: str = "generic/quick", data_disk: str = "", test_cases: str = "", timeout: int = 14400, ) -> None: """About: This method runs XFSTest on a given node with the specified - test group and test cases.If test_section is not specified , test is - run with "generic/quick" classification and XFS environment variables. - If test_section is specified, test is run with the specified test group - and XFS environment variables from local.config.If test_cases is specified, - only the specified test cases are run.If empty, all test cases barring - exclude.txt entries are run.Runtime is set to 4 hours by default, - but can be overridden by the user.This method after running xfstest - will parse the output and sends subtest results to the test result object. + test group and test cases Parameters: - log_path: The path where the xfstests logs will be saved - result: The LISA test result object to which the subtest results will be sent - test_section: The test group name to be used for testing. 
- Defaults to "generic/quick" - note: if specified, test_section must exist in local.config - data_disk: The data disk used for testing - test_cases: The test cases to be run. If empty, all installed test cases - barring exclude.txt entries are run - timeout: The time in seconds after which the test will be timed out. + log_path (Path): (Mandatory)The path where the xfstests logs will be saved + result (TestResult): (Mandatory The LISA test result object to which the + subtest results will be sent + test_section (Str): (Optional)The test section name to be used for testing. + Defaults to empty string. If not specified, xfstests will use environment + variables and any first entries in local.config to run tests + note: if specified, test_section must exist in local.config. There is no + local checks in code + test_group (str): The test group to be used for testing. Defaults to + generic/quick. test_group signifies the basic mandatory tests to run. + Normally this is /quick but can be any one of the values from + groups.list in tests/ directory. + If passed as "", it will be ignored and xfstests will run all tests. + data_disk(st): The data disk device ID used for testing as scratch and mount + space + test_cases(str): Intended to be used in conjunction with test_group. + This is a space separated list of test cases to be run. If passed as "", + it will be ignored. test_cases signifies additional cases to be run apart + from the group tests and exclusion list from exclude.txt previously + generated and put in the tool path. Its usefull for mixing and matching + test cases from different file systems, example xfs tests and generic tests. + timeout(int): The time in seconds after which the test run will be timed out. Defaults to 4 hours. Example: xfstest.run_test( log_path=Path("/tmp/xfstests"), result=test_result, - test_section="generic/quick", - data_disk="/dev/sdb", + test_section="ext4" + test_group="generic/quick", + data_disk="/dev/sdd", test_cases="generic/001 generic/002", timeout=14400, ) From ba402d980dab4ff1e7aee41f7016b8a1c9ca1c2b Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Thu, 20 Mar 2025 21:10:39 +0530 Subject: [PATCH 32/37] Update xfstests.py --- microsoft/testsuites/xfstests/xfstests.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 65075298d9..f6f82fcf03 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -211,8 +211,8 @@ def run_test( test_cases(str): Intended to be used in conjunction with test_group. This is a space separated list of test cases to be run. If passed as "", it will be ignored. test_cases signifies additional cases to be run apart - from the group tests and exclusion list from exclude.txt previously - generated and put in the tool path. Its usefull for mixing and matching + from the group tests and exclusion list from exclude.txt previously + generated and put in the tool path. Its usefull for mixing and matching test cases from different file systems, example xfs tests and generic tests. timeout(int): The time in seconds after which the test run will be timed out. Defaults to 4 hours. 
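To make the parameter semantics documented in the run_test() docstring above easier to follow, here is a rough, non-authoritative sketch of how test_section, test_group, test_cases and the exclude file could be composed into a check command line. The exact flags LISA passes are defined in xfstests.py; this helper exists only to illustrate the documented meaning of each parameter.

# Illustrative only: build_check_args is a hypothetical helper, not part of
# the tool. It shows the intent of -s (a local.config section), -g (a group
# such as generic/quick or cifs/quick), -E (the exclusion list) and the extra
# space-separated test cases appended at the end.
from typing import Optional


def build_check_args(
    test_section: str = "",
    test_group: str = "generic/quick",
    test_cases: str = "",
    exclude_file: Optional[str] = "exclude.txt",
) -> str:
    args = []
    if test_section:
        # -s selects a [section] from local.config; it must exist there.
        args.append(f"-s {test_section}")
    if test_group:
        # -g selects a test group from groups.list, e.g. generic/quick.
        args.append(f"-g {test_group}")
    if exclude_file:
        args.append(f"-E {exclude_file}")
    if test_cases:
        # Additional cases run on top of the group selection.
        args.append(test_cases)
    return " ".join(args)


print(build_check_args(test_section="cifs", test_group="cifs/quick",
                       test_cases="generic/001 generic/002"))
# -s cifs -g cifs/quick -E exclude.txt generic/001 generic/002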
From b29dabe40503613cfe501614edfb911efd77bb29 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 25 Mar 2025 13:25:10 +0530 Subject: [PATCH 33/37] Update xfstesting.py --- microsoft/testsuites/xfstests/xfstesting.py | 97 +++++++++++++-------- 1 file changed, 63 insertions(+), 34 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 43d93a5a0b..7ea070e32c 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -2,7 +2,7 @@ # Licensed under the MIT license. import string from pathlib import Path -from typing import Any, Dict, List, Union, cast +from typing import Any, Dict, Union, cast from lisa import ( Logger, @@ -112,7 +112,7 @@ def _prepare_data_disk( def _deploy_azure_file_share( node: Node, environment: Environment, - names: List[str], + names: Dict[str, str], azure_file_share: Union[AzureFileShare, Nfs], allow_shared_key_access: bool = True, enable_private_endpoint: bool = True, @@ -128,7 +128,7 @@ def _deploy_azure_file_share( """ if isinstance(azure_file_share, AzureFileShare): fs_url_dict: Dict[str, str] = azure_file_share.create_file_share( - file_share_names=names, + file_share_names=list(names.values()), environment=environment, sku=storage_account_sku, kind=storage_account_kind, @@ -136,10 +136,9 @@ def _deploy_azure_file_share( enable_private_endpoint=enable_private_endpoint, quota_in_gb=file_share_quota_in_gb, ) - test_folders_share_dict = { - _test_folder: fs_url_dict[names[0]], - _scratch_folder: fs_url_dict[names[1]], - } + test_folders_share_dict: Dict[str, str] = {} + for key, value in names.items(): + test_folders_share_dict[key] = fs_url_dict[value] azure_file_share.create_fileshare_folders(test_folders_share_dict) elif isinstance(azure_file_share, Nfs): # NFS yet to be implemented @@ -575,6 +574,13 @@ def verify_azure_file_share( node.os, "current distro not enable cifs module." ) xfstests = self._install_xfstests(node) + # These local variables are needed to track resource retention + # on demand / test failure. + # This is to ensure that the storage account is not deleted + # if the test fails and the keep_environment is set to "always" or "failed". 
+ + keep_environment = environment.platform.runbook.keep_environment + test_failed: bool = False azure_file_share = node.features[AzureFileShare] random_str = generate_random_chars(string.ascii_lowercase + string.digits, 10) @@ -587,35 +593,58 @@ def verify_azure_file_share( fs_url_dict: Dict[str, str] = _deploy_azure_file_share( node=node, environment=environment, - # file_share_name=file_share_name, - # scratch_name=scratch_name, - names=[file_share_name, scratch_name], + names={ + _test_folder: file_share_name, + _scratch_folder: scratch_name, + }, azure_file_share=azure_file_share, ) - # Create Xfstest config - xfstests.set_local_config( - scratch_dev=fs_url_dict[scratch_name], - scratch_mnt=_scratch_folder, - test_dev=fs_url_dict[file_share_name], - test_folder=_test_folder, - file_system="cifs", - test_section="cifs", - mount_opts=mount_opts, - testfs_mount_opts=mount_opts, - overwrite_config=True, - ) - # Create excluded test file - xfstests.set_excluded_tests(_default_smb_excluded_tests) - # run the test - log.info("Running xfstests against azure file share") - xfstests.run_test( - test_section="cifs", - test_group="cifs/quick", - log_path=log_path, - result=result, - test_cases=_default_smb_testcases, - timeout=self.TIME_OUT - 30, - ) + try: + # Create Xfstest config + xfstests.set_local_config( + scratch_dev=fs_url_dict[scratch_name], + scratch_mnt=_scratch_folder, + test_dev=fs_url_dict[file_share_name], + test_folder=_test_folder, + file_system="cifs", + test_section="cifs", + mount_opts=mount_opts, + testfs_mount_opts=mount_opts, + overwrite_config=True, + ) + # Create excluded test file + xfstests.set_excluded_tests(_default_smb_excluded_tests) + # run the test + log.info("Running xfstests against azure file share") + xfstests.run_test( + test_section="cifs", + test_group="cifs/quick", + log_path=log_path, + result=result, + test_cases=_default_smb_testcases, + timeout=self.TIME_OUT - 30, + ) + except Exception as e: + log.error(f"Error running xfstests against azure file share: {str(e)}") + test_failed = True + finally: + # If test_failed is true and keep_environment is Always / Failed, we keep + # the storage account, else we delete it. + if keep_environment in ["failed", "always"]: + if test_failed is True: + log.info("Keeping Azure file share for manual testing.") + else: + log.info( + "Keeping Azure file share as keep_environment is set to 'True'." + ) + else: + log.info( + "Deleting Azure file share as keep_environment is set to 'False'." + ) + # this will ensure that the expensive storage resources are decom + # before the test case ends. + # This is important to avoid incurring unnecessary costs. + azure_file_share.delete_azure_fileshare([file_share_name, scratch_name]) def after_case(self, log: Logger, **kwargs: Any) -> None: try: From a5e70523b14d839ac1f8b37144382f117e334036 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Tue, 25 Mar 2025 19:43:13 +0530 Subject: [PATCH 34/37] Minor bug fix due to un-intended except block not correctly reflecting correct test status on console. 
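The names parameter introduced in PATCH 33 maps mount folders to file share names, and the deployment helper re-keys the returned share URLs by mount folder so they can be fed straight into create_fileshare_folders(). A minimal sketch of that re-keying follows; the folder values, share names and URLs are made-up placeholders (the real _test_folder value is assumed, not quoted from the source).

# Sketch of the folder -> share mapping from PATCH 33; not the LISA code.
from typing import Dict

_test_folder = "/mnt/test"        # placeholder value, assumed for illustration
_scratch_folder = "/mnt/scratch"


def map_folders_to_share_urls(
    names: Dict[str, str], fs_url_dict: Dict[str, str]
) -> Dict[str, str]:
    # names: mount folder -> file share name
    # fs_url_dict: file share name -> SMB URL returned by share creation
    return {folder: fs_url_dict[share] for folder, share in names.items()}


names = {_test_folder: "lisa123fs", _scratch_folder: "lisa123scratch"}
fs_url_dict = {
    "lisa123fs": "//account.file.core.windows.net/lisa123fs",
    "lisa123scratch": "//account.file.core.windows.net/lisa123scratch",
}
print(map_folders_to_share_urls(names, fs_url_dict))
# {'/mnt/test': '//.../lisa123fs', '/mnt/scratch': '//.../lisa123scratch'}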
--- microsoft/testsuites/xfstests/xfstesting.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 7ea070e32c..916692dbfc 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -577,8 +577,7 @@ def verify_azure_file_share( # These local variables are needed to track resource retention # on demand / test failure. # This is to ensure that the storage account is not deleted - # if the test fails and the keep_environment is set to "always" or "failed". - + # if the test fails and the keep_environment is set to "always" or "failed" keep_environment = environment.platform.runbook.keep_environment test_failed: bool = False @@ -624,12 +623,11 @@ def verify_azure_file_share( test_cases=_default_smb_testcases, timeout=self.TIME_OUT - 30, ) - except Exception as e: - log.error(f"Error running xfstests against azure file share: {str(e)}") - test_failed = True finally: # If test_failed is true and keep_environment is Always / Failed, we keep # the storage account, else we delete it. + if result.status == "FAILED": + test_failed = True if keep_environment in ["failed", "always"]: if test_failed is True: log.info("Keeping Azure file share for manual testing.") From 283883009cacda703a32a373c07e2fd3e3f168f7 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 26 Mar 2025 02:18:26 +0530 Subject: [PATCH 35/37] Update xfstests.py --- microsoft/testsuites/xfstests/xfstests.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index f6f82fcf03..13fac51723 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -532,7 +532,6 @@ def set_excluded_tests(self, exclude_tests: str) -> None: for exclude_test in exclude_tests.split(): echo.write_to_file(exclude_test, exclude_file_path, append=True) - # add more usable details in subtest additional information field def create_send_subtest_msg( self, test_result: "TestResult", @@ -824,10 +823,9 @@ def create_xfstest_stack_info( test_class = case.split("/")[0] test_id = case.split("/")[1] result_path = xfstests_path / f"results/{test_section}/{test_class}" - # this needs to be fixed as it's spilling over to console output. - # ls_tool = self.node.tools[Ls] cat_tool = self.node.tools[Cat] result = "" + # note: ls tool is not used here due to performance issues. if not self.node.shell.exists(result_path): self._log.debug(f"No files found in path {result_path}") # non terminating error !!! 
From 1409f4cd89a4133fdcb79703f48eabe53d37b1a2 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Wed, 26 Mar 2025 18:21:27 +0530 Subject: [PATCH 36/37] reverted verify_azure_file_share ti not using try and finally block, relying on LISA for resource life cycle management --- microsoft/testsuites/xfstests/xfstesting.py | 78 +++++++-------------- 1 file changed, 25 insertions(+), 53 deletions(-) diff --git a/microsoft/testsuites/xfstests/xfstesting.py b/microsoft/testsuites/xfstests/xfstesting.py index 916692dbfc..55fb019171 100644 --- a/microsoft/testsuites/xfstests/xfstesting.py +++ b/microsoft/testsuites/xfstests/xfstesting.py @@ -571,16 +571,9 @@ def verify_azure_file_share( node = cast(RemoteNode, environment.nodes[0]) if not node.tools[KernelConfig].is_enabled("CONFIG_CIFS"): raise UnsupportedDistroException( - node.os, "current distro not enable cifs module." + node.os, "current distro is not enabled with cifs module." ) xfstests = self._install_xfstests(node) - # These local variables are needed to track resource retention - # on demand / test failure. - # This is to ensure that the storage account is not deleted - # if the test fails and the keep_environment is set to "always" or "failed" - keep_environment = environment.platform.runbook.keep_environment - test_failed: bool = False - azure_file_share = node.features[AzureFileShare] random_str = generate_random_chars(string.ascii_lowercase + string.digits, 10) file_share_name = f"lisa{random_str}fs" @@ -598,51 +591,30 @@ def verify_azure_file_share( }, azure_file_share=azure_file_share, ) - try: - # Create Xfstest config - xfstests.set_local_config( - scratch_dev=fs_url_dict[scratch_name], - scratch_mnt=_scratch_folder, - test_dev=fs_url_dict[file_share_name], - test_folder=_test_folder, - file_system="cifs", - test_section="cifs", - mount_opts=mount_opts, - testfs_mount_opts=mount_opts, - overwrite_config=True, - ) - # Create excluded test file - xfstests.set_excluded_tests(_default_smb_excluded_tests) - # run the test - log.info("Running xfstests against azure file share") - xfstests.run_test( - test_section="cifs", - test_group="cifs/quick", - log_path=log_path, - result=result, - test_cases=_default_smb_testcases, - timeout=self.TIME_OUT - 30, - ) - finally: - # If test_failed is true and keep_environment is Always / Failed, we keep - # the storage account, else we delete it. - if result.status == "FAILED": - test_failed = True - if keep_environment in ["failed", "always"]: - if test_failed is True: - log.info("Keeping Azure file share for manual testing.") - else: - log.info( - "Keeping Azure file share as keep_environment is set to 'True'." - ) - else: - log.info( - "Deleting Azure file share as keep_environment is set to 'False'." - ) - # this will ensure that the expensive storage resources are decom - # before the test case ends. - # This is important to avoid incurring unnecessary costs. 
- azure_file_share.delete_azure_fileshare([file_share_name, scratch_name]) + # Create Xfstest config + xfstests.set_local_config( + scratch_dev=fs_url_dict[scratch_name], + scratch_mnt=_scratch_folder, + test_dev=fs_url_dict[file_share_name], + test_folder=_test_folder, + file_system="cifs", + test_section="cifs", + mount_opts=mount_opts, + testfs_mount_opts=mount_opts, + overwrite_config=True, + ) + # Create excluded test file + xfstests.set_excluded_tests(_default_smb_excluded_tests) + # run the test + log.info("Running xfstests against azure file share") + xfstests.run_test( + test_section="cifs", + test_group="cifs/quick", + log_path=log_path, + result=result, + test_cases=_default_smb_testcases, + timeout=self.TIME_OUT - 30, + ) def after_case(self, log: Logger, **kwargs: Any) -> None: try: From e07cb2a6a7ac39db06966fe714de4a806d2ba317 Mon Sep 17 00:00:00 2001 From: "Shekhar Sorot ( MSFT )" Date: Fri, 28 Mar 2025 11:36:23 +0530 Subject: [PATCH 37/37] added comments to xfstst.py for create_xfstest_stack_info() method --- microsoft/testsuites/xfstests/xfstests.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/microsoft/testsuites/xfstests/xfstests.py b/microsoft/testsuites/xfstests/xfstests.py index 13fac51723..bc44df7098 100644 --- a/microsoft/testsuites/xfstests/xfstests.py +++ b/microsoft/testsuites/xfstests/xfstests.py @@ -828,7 +828,27 @@ def create_xfstest_stack_info( # note: ls tool is not used here due to performance issues. if not self.node.shell.exists(result_path): self._log.debug(f"No files found in path {result_path}") - # non terminating error !!! + # Note: This is a non terminating error. + # Do not force an exception for this definition in the future !!! + # Reason : XFStest in certain conditions will not generate any output + # for specific tests. these output include *.full, *.out and *.out.fail + # This also holds true for optional output files such as *.dmesg + # and *.notrun + # This however does not means that the subtest has failed. We can and + # still use xfstests.log output to parse subtest count and extract + # failed test status and messages in regular case. + # Conditions for failure : + # 1. XFStests.log is not found + # 2. XFStests.log is empty + # 3. XFStests.log EOF does not contains test summary ( implies proc fail ) + # 4. Loss of SSH connection that cannot be re-established + # Conditions not for test failure : + # 1. No files found in results directory + # 2. No files found for specific test case status, i.e notrun or dmesg + # 3. No files found for specific test case status, i.e full or out.bad + # 4. Any other file output when xfstests.log states test status with message + # 5. Any other file output when xfstests.log states test status without + # 6. XFStests.log footer contains test summary ( implies proc success ) result = f"No files found in path {result_path}" else: # Prepare file paths
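As a companion to the comments added in PATCH 37, the following simplified sketch mirrors the per-status artifact selection that create_xfstest_stack_info() performs on the node. It deliberately skips the full-vs-out.bad diff step and the sudo cat calls used in the real tool, works on a local filesystem, and treats missing files as non-fatal, which is exactly the point the note above makes: xfstest.log remains the authoritative pass/fail signal, and these files only enrich the subtest message.

# Simplified local-filesystem sketch, not the LISA implementation.
from pathlib import Path
from typing import Optional


def collect_diagnostics(result_dir: Path, test_id: str, status: str) -> str:
    def read(suffix: str) -> Optional[str]:
        p = result_dir / f"{test_id}.{suffix}"
        return p.read_text() if p.exists() else None

    if status == "PASSED":
        dmesg = read("dmesg")
        return f"DMESG: {dmesg}" if dmesg else "No diagnostic information available"
    if status == "FAILED":
        parts = []
        out_bad = read("out.bad")  # failure output, not always produced
        full = read("full")        # full log, may be absent if the test never ran
        if out_bad or full:
            parts.append(f"DIFF: {out_bad or full}")
        dmesg = read("dmesg")
        if dmesg:
            parts.append(f"DMESG: {dmesg}")
        hints = read("hints")
        if hints:
            parts.append(f"HINT: {hints}")
        return "\n\n".join(parts) or "No diagnostic information available"
    if status == "SKIPPED":
        notrun = read("notrun")
        return f"NOTRUN: {notrun}" if notrun else "No notrun information available"
    return f"No relevant output files found for test {test_id} with status {status}"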