From patchwork Wed Dec 20 10:33:27 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-Patchwork-Submitter: =?utf-8?q?Juraj_Linke=C5=A1?= X-Patchwork-Id: 135388 X-Patchwork-Delegate: thomas@monjalon.net Received: from jlinkes-PT-Latitude-5530.pantheon.local (81.89.53.154.host.vnet.sk.
[81.89.53.154]) by smtp.gmail.com with ESMTPSA id bd18-20020a056402207200b00542db304680sm12588981edb.63.2023.12.20.02.33.35 (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); Wed, 20 Dec 2023 02:33:36 -0800 (PST) From: =?utf-8?q?Juraj_Linke=C5=A1?= To: thomas@monjalon.net, Honnappa.Nagarahalli@arm.com, jspewock@iol.unh.edu, probb@iol.unh.edu, paul.szczepanek@arm.com, yoan.picchi@foss.arm.com, Luca.Vizzarro@arm.com Cc: dev@dpdk.org, =?utf-8?q?Juraj_Linke=C5=A1?= Subject: [RFC PATCH v1 1/5] dts: convert dts.py methods to class Date: Wed, 20 Dec 2023 11:33:27 +0100 Message-Id: <20231220103331.60888-2-juraj.linkes@pantheon.tech> X-Mailer: git-send-email 2.34.1 In-Reply-To: <20231220103331.60888-1-juraj.linkes@pantheon.tech> References: <20231220103331.60888-1-juraj.linkes@pantheon.tech> MIME-Version: 1.0 X-BeenThere: dev@dpdk.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: DPDK patches and discussions List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , Errors-To: dev-bounces@dpdk.org The dts.py module deviates from the rest of the code without a clear reason. Converting it into a class and using better naming will improve organization and code readability. Signed-off-by: Juraj Linkeš --- dts/framework/config/__init__.py | 3 - dts/framework/dts.py | 228 ----------------------------- dts/framework/runner.py | 243 +++++++++++++++++++++++++++++++ dts/main.py | 6 +- 4 files changed, 247 insertions(+), 233 deletions(-) delete mode 100644 dts/framework/dts.py create mode 100644 dts/framework/runner.py diff --git a/dts/framework/config/__init__.py b/dts/framework/config/__init__.py index 9b32cf0532..497847afb9 100644 --- a/dts/framework/config/__init__.py +++ b/dts/framework/config/__init__.py @@ -314,6 +314,3 @@ def load_config() -> Configuration: config: dict[str, Any] = warlock.model_factory(schema, name="_Config")(config_data) config_obj: Configuration = Configuration.from_dict(dict(config)) return config_obj - - -CONFIGURATION = load_config() diff --git a/dts/framework/dts.py b/dts/framework/dts.py deleted file mode 100644 index 25d6942d81..0000000000 --- a/dts/framework/dts.py +++ /dev/null @@ -1,228 +0,0 @@ -# SPDX-License-Identifier: BSD-3-Clause -# Copyright(c) 2010-2019 Intel Corporation -# Copyright(c) 2022-2023 PANTHEON.tech s.r.o. -# Copyright(c) 2022-2023 University of New Hampshire - -import sys - -from .config import ( - CONFIGURATION, - BuildTargetConfiguration, - ExecutionConfiguration, - TestSuiteConfig, -) -from .exception import BlockingTestSuiteError -from .logger import DTSLOG, getLogger -from .test_result import BuildTargetResult, DTSResult, ExecutionResult, Result -from .test_suite import get_test_suites -from .testbed_model import SutNode, TGNode -from .utils import check_dts_python_version - -dts_logger: DTSLOG = getLogger("DTSRunner") -result: DTSResult = DTSResult(dts_logger) - - -def run_all() -> None: - """ - The main process of DTS. Runs all build targets in all executions from the main - config file. 
- """ - global dts_logger - global result - - # check the python version of the server that run dts - check_dts_python_version() - - sut_nodes: dict[str, SutNode] = {} - tg_nodes: dict[str, TGNode] = {} - try: - # for all Execution sections - for execution in CONFIGURATION.executions: - sut_node = sut_nodes.get(execution.system_under_test_node.name) - tg_node = tg_nodes.get(execution.traffic_generator_node.name) - - try: - if not sut_node: - sut_node = SutNode(execution.system_under_test_node) - sut_nodes[sut_node.name] = sut_node - if not tg_node: - tg_node = TGNode(execution.traffic_generator_node) - tg_nodes[tg_node.name] = tg_node - result.update_setup(Result.PASS) - except Exception as e: - failed_node = execution.system_under_test_node.name - if sut_node: - failed_node = execution.traffic_generator_node.name - dts_logger.exception(f"Creation of node {failed_node} failed.") - result.update_setup(Result.FAIL, e) - - else: - _run_execution(sut_node, tg_node, execution, result) - - except Exception as e: - dts_logger.exception("An unexpected error has occurred.") - result.add_error(e) - raise - - finally: - try: - for node in (sut_nodes | tg_nodes).values(): - node.close() - result.update_teardown(Result.PASS) - except Exception as e: - dts_logger.exception("Final cleanup of nodes failed.") - result.update_teardown(Result.ERROR, e) - - # we need to put the sys.exit call outside the finally clause to make sure - # that unexpected exceptions will propagate - # in that case, the error that should be reported is the uncaught exception as - # that is a severe error originating from the framework - # at that point, we'll only have partial results which could be impacted by the - # error causing the uncaught exception, making them uninterpretable - _exit_dts() - - -def _run_execution( - sut_node: SutNode, - tg_node: TGNode, - execution: ExecutionConfiguration, - result: DTSResult, -) -> None: - """ - Run the given execution. This involves running the execution setup as well as - running all build targets in the given execution. - """ - dts_logger.info(f"Running execution with SUT '{execution.system_under_test_node.name}'.") - execution_result = result.add_execution(sut_node.config) - execution_result.add_sut_info(sut_node.node_info) - - try: - sut_node.set_up_execution(execution) - execution_result.update_setup(Result.PASS) - except Exception as e: - dts_logger.exception("Execution setup failed.") - execution_result.update_setup(Result.FAIL, e) - - else: - for build_target in execution.build_targets: - _run_build_target(sut_node, tg_node, build_target, execution, execution_result) - - finally: - try: - sut_node.tear_down_execution() - execution_result.update_teardown(Result.PASS) - except Exception as e: - dts_logger.exception("Execution teardown failed.") - execution_result.update_teardown(Result.FAIL, e) - - -def _run_build_target( - sut_node: SutNode, - tg_node: TGNode, - build_target: BuildTargetConfiguration, - execution: ExecutionConfiguration, - execution_result: ExecutionResult, -) -> None: - """ - Run the given build target. 
- """ - dts_logger.info(f"Running build target '{build_target.name}'.") - build_target_result = execution_result.add_build_target(build_target) - - try: - sut_node.set_up_build_target(build_target) - result.dpdk_version = sut_node.dpdk_version - build_target_result.add_build_target_info(sut_node.get_build_target_info()) - build_target_result.update_setup(Result.PASS) - except Exception as e: - dts_logger.exception("Build target setup failed.") - build_target_result.update_setup(Result.FAIL, e) - - else: - _run_all_suites(sut_node, tg_node, execution, build_target_result) - - finally: - try: - sut_node.tear_down_build_target() - build_target_result.update_teardown(Result.PASS) - except Exception as e: - dts_logger.exception("Build target teardown failed.") - build_target_result.update_teardown(Result.FAIL, e) - - -def _run_all_suites( - sut_node: SutNode, - tg_node: TGNode, - execution: ExecutionConfiguration, - build_target_result: BuildTargetResult, -) -> None: - """ - Use the given build_target to run execution's test suites - with possibly only a subset of test cases. - If no subset is specified, run all test cases. - """ - end_build_target = False - if not execution.skip_smoke_tests: - execution.test_suites[:0] = [TestSuiteConfig.from_dict("smoke_tests")] - for test_suite_config in execution.test_suites: - try: - _run_single_suite(sut_node, tg_node, execution, build_target_result, test_suite_config) - except BlockingTestSuiteError as e: - dts_logger.exception( - f"An error occurred within {test_suite_config.test_suite}. Skipping build target." - ) - result.add_error(e) - end_build_target = True - # if a blocking test failed and we need to bail out of suite executions - if end_build_target: - break - - -def _run_single_suite( - sut_node: SutNode, - tg_node: TGNode, - execution: ExecutionConfiguration, - build_target_result: BuildTargetResult, - test_suite_config: TestSuiteConfig, -) -> None: - """Runs a single test suite. - - Args: - sut_node: Node to run tests on. - execution: Execution the test case belongs to. - build_target_result: Build target configuration test case is run on - test_suite_config: Test suite configuration - - Raises: - BlockingTestSuiteError: If a test suite that was marked as blocking fails. - """ - try: - full_suite_path = f"tests.TestSuite_{test_suite_config.test_suite}" - test_suite_classes = get_test_suites(full_suite_path) - suites_str = ", ".join((x.__name__ for x in test_suite_classes)) - dts_logger.debug(f"Found test suites '{suites_str}' in '{full_suite_path}'.") - except Exception as e: - dts_logger.exception("An error occurred when searching for test suites.") - result.update_setup(Result.ERROR, e) - - else: - for test_suite_class in test_suite_classes: - test_suite = test_suite_class( - sut_node, - tg_node, - test_suite_config.test_cases, - execution.func, - build_target_result, - ) - test_suite.run() - - -def _exit_dts() -> None: - """ - Process all errors and exit with the proper exit code. - """ - result.process() - - if dts_logger: - dts_logger.info("DTS execution has ended.") - sys.exit(result.get_return_code()) diff --git a/dts/framework/runner.py b/dts/framework/runner.py new file mode 100644 index 0000000000..5b077c5805 --- /dev/null +++ b/dts/framework/runner.py @@ -0,0 +1,243 @@ +# SPDX-License-Identifier: BSD-3-Clause +# Copyright(c) 2010-2019 Intel Corporation +# Copyright(c) 2022-2023 PANTHEON.tech s.r.o. 
+# Copyright(c) 2022-2023 University of New Hampshire + +import logging +import sys + +from .config import ( + BuildTargetConfiguration, + Configuration, + ExecutionConfiguration, + TestSuiteConfig, +) +from .exception import BlockingTestSuiteError +from .logger import DTSLOG, getLogger +from .test_result import BuildTargetResult, DTSResult, ExecutionResult, Result +from .test_suite import get_test_suites +from .testbed_model import SutNode, TGNode +from .utils import check_dts_python_version + + +class DTSRunner: + _logger: DTSLOG + _result: DTSResult + _configuration: Configuration + + def __init__(self, configuration: Configuration): + self._logger = getLogger("DTSRunner") + self._result = DTSResult(self._logger) + self._configuration = configuration + + def run(self): + """ + The main process of DTS. Runs all build targets in all executions from the main + config file. + """ + # check the python version of the server that run dts + check_dts_python_version() + sut_nodes: dict[str, SutNode] = {} + tg_nodes: dict[str, TGNode] = {} + try: + # for all Execution sections + for execution in self._configuration.executions: + sut_node = sut_nodes.get(execution.system_under_test_node.name) + tg_node = tg_nodes.get(execution.traffic_generator_node.name) + + try: + if not sut_node: + sut_node = SutNode(execution.system_under_test_node) + sut_nodes[sut_node.name] = sut_node + if not tg_node: + tg_node = TGNode(execution.traffic_generator_node) + tg_nodes[tg_node.name] = tg_node + self._result.update_setup(Result.PASS) + except Exception as e: + failed_node = execution.system_under_test_node.name + if sut_node: + failed_node = execution.traffic_generator_node.name + self._logger.exception( + f"The Creation of node {failed_node} failed." + ) + self._result.update_setup(Result.FAIL, e) + + else: + self._run_execution(sut_node, tg_node, execution) + + except Exception as e: + self._logger.exception("An unexpected error has occurred.") + self._result.add_error(e) + raise + + finally: + try: + for node in (sut_nodes | tg_nodes).values(): + node.close() + self._result.update_teardown(Result.PASS) + except Exception as e: + self._logger.exception("The final cleanup of nodes failed.") + self._result.update_teardown(Result.ERROR, e) + + # we need to put the sys.exit call outside the finally clause to make sure + # that unexpected exceptions will propagate + # in that case, the error that should be reported is the uncaught exception as + # that is a severe error originating from the framework + # at that point, we'll only have partial results which could be impacted by the + # error causing the uncaught exception, making them uninterpretable + self._exit_dts() + + def _run_execution( + self, + sut_node: SutNode, + tg_node: TGNode, + execution: ExecutionConfiguration, + ) -> None: + """ + Run the given execution. This involves running the execution setup as well as + running all build targets in the given execution. + """ + self._logger.info( + f"Running execution with SUT '{execution.system_under_test_node.name}'." 
+ ) + execution_result = self._result.add_execution(sut_node.config) + execution_result.add_sut_info(sut_node.node_info) + + try: + sut_node.set_up_execution(execution) + execution_result.update_setup(Result.PASS) + except Exception as e: + self._logger.exception("Execution setup failed.") + execution_result.update_setup(Result.FAIL, e) + + else: + for build_target in execution.build_targets: + self._run_build_target( + sut_node, tg_node, build_target, execution, execution_result + ) + + finally: + try: + sut_node.tear_down_execution() + execution_result.update_teardown(Result.PASS) + except Exception as e: + self._logger.exception("Execution teardown failed.") + execution_result.update_teardown(Result.FAIL, e) + + def _run_build_target( + self, + sut_node: SutNode, + tg_node: TGNode, + build_target: BuildTargetConfiguration, + execution: ExecutionConfiguration, + execution_result: ExecutionResult, + ) -> None: + """ + Run the given build target. + """ + self._logger.info(f"Running build target '{build_target.name}'.") + build_target_result = execution_result.add_build_target(build_target) + + try: + sut_node.set_up_build_target(build_target) + self._result.dpdk_version = sut_node.dpdk_version + build_target_result.add_build_target_info(sut_node.get_build_target_info()) + build_target_result.update_setup(Result.PASS) + except Exception as e: + self._logger.exception("Build target setup failed.") + build_target_result.update_setup(Result.FAIL, e) + + else: + self._run_all_suites(sut_node, tg_node, execution, build_target_result) + + finally: + try: + sut_node.tear_down_build_target() + build_target_result.update_teardown(Result.PASS) + except Exception as e: + self._logger.exception("Build target teardown failed.") + build_target_result.update_teardown(Result.FAIL, e) + + def _run_all_suites( + self, + sut_node: SutNode, + tg_node: TGNode, + execution: ExecutionConfiguration, + build_target_result: BuildTargetResult, + ) -> None: + """ + Use the given build_target to run execution's test suites + with possibly only a subset of test cases. + If no subset is specified, run all test cases. + """ + end_build_target = False + if not execution.skip_smoke_tests: + execution.test_suites[:0] = [TestSuiteConfig.from_dict("smoke_tests")] + for test_suite_config in execution.test_suites: + try: + self._run_single_suite( + sut_node, tg_node, execution, build_target_result, test_suite_config + ) + except BlockingTestSuiteError as e: + self._logger.exception( + f"An error occurred within {test_suite_config.test_suite}. " + "Skipping build target..." + ) + self._result.add_error(e) + end_build_target = True + # if a blocking test failed and we need to bail out of suite executions + if end_build_target: + break + + def _run_single_suite( + self, + sut_node: SutNode, + tg_node: TGNode, + execution: ExecutionConfiguration, + build_target_result: BuildTargetResult, + test_suite_config: TestSuiteConfig, + ) -> None: + """Runs a single test suite. + + Args: + sut_node: Node to run tests on. + execution: Execution the test case belongs to. + build_target_result: Build target configuration test case is run on + test_suite_config: Test suite configuration + + Raises: + BlockingTestSuiteError: If a test suite that was marked as blocking fails. 
+ """ + try: + full_suite_path = f"tests.TestSuite_{test_suite_config.test_suite}" + test_suite_classes = get_test_suites(full_suite_path) + suites_str = ", ".join((x.__name__ for x in test_suite_classes)) + self._logger.debug( + f"Found test suites '{suites_str}' in '{full_suite_path}'." + ) + except Exception as e: + self._logger.exception("An error occurred when searching for test suites.") + self._result.update_setup(Result.ERROR, e) + + else: + for test_suite_class in test_suite_classes: + test_suite = test_suite_class( + sut_node, + tg_node, + test_suite_config.test_cases, + execution.func, + build_target_result, + ) + test_suite.run() + + def _exit_dts(self) -> None: + """ + Process all errors and exit with the proper exit code. + """ + self._result.process() + + if self._logger: + self._logger.info("DTS execution has ended.") + + logging.shutdown() + sys.exit(self._result.get_return_code()) diff --git a/dts/main.py b/dts/main.py index 43311fa847..879ce5cb89 100755 --- a/dts/main.py +++ b/dts/main.py @@ -10,11 +10,13 @@ import logging -from framework import dts +from framework.config import load_config +from framework.runner import DTSRunner def main() -> None: - dts.run_all() + dts = DTSRunner(configuration=load_config()) + dts.run() # Main program begins here From patchwork Wed Dec 20 10:33:28 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-Patchwork-Submitter: =?utf-8?q?Juraj_Linke=C5=A1?= X-Patchwork-Id: 135389 X-Patchwork-Delegate: thomas@monjalon.net Return-Path: X-Original-To: patchwork@inbox.dpdk.org Delivered-To: patchwork@inbox.dpdk.org Received: from mails.dpdk.org (mails.dpdk.org [217.70.189.124]) by inbox.dpdk.org (Postfix) with ESMTP id 08F4643723; Wed, 20 Dec 2023 11:33:52 +0100 (CET) Received: from mails.dpdk.org (localhost [127.0.0.1]) by mails.dpdk.org (Postfix) with ESMTP id BCDA940E96; Wed, 20 Dec 2023 11:33:41 +0100 (CET) Received: from mail-ed1-f43.google.com (mail-ed1-f43.google.com [209.85.208.43]) by mails.dpdk.org (Postfix) with ESMTP id 090F740A80 for ; Wed, 20 Dec 2023 11:33:40 +0100 (CET) Received: by mail-ed1-f43.google.com with SMTP id 4fb4d7f45d1cf-5532b348d30so4291860a12.1 for ; Wed, 20 Dec 2023 02:33:40 -0800 (PST) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=pantheon.tech; s=google; t=1703068419; x=1703673219; darn=dpdk.org; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:from:to:cc:subject:date :message-id:reply-to; bh=JsbpHYpWeoH4MSajR9/GoFfMDHdjZc9/Q/tLniUsi84=; b=r8ukcduDPQlY29lHAzHQGHPhxE2lOplKHzBfjNc4xZ/cceUEdDYWdgLdI30qpjyBUS ZQmacpsV0v6+WtS71W31O7ybRm54JhATMwpA/qKpsryHjvkXLLTjWMboH0g6zz9mzdVE xjmVcwqWXSR4HBk1CuAI2YQHruCIiRV+Yr/sM/fTgFHBBEvSRlxML3Dolff9t3XRHGxC DYyNEJ9iqM2PS/Ztdq1+zKomQExz5kIsp9oB6I681bRahlPZvvBnAXq+1EPaLdQLNEE1 3YGBWRwCvTWaEg75Teq3pJGw1JU/B52kNpoqKKgyfptnRrcW3dfUHQ5dXP0E7aJPfkMs QgxQ== X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=1e100.net; s=20230601; t=1703068419; x=1703673219; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:x-gm-message-state:from:to:cc :subject:date:message-id:reply-to; bh=JsbpHYpWeoH4MSajR9/GoFfMDHdjZc9/Q/tLniUsi84=; b=LGGRmqdUPhPpiJ7eo/wFexW09yiN/rj/CaJfIrp70x01B/ftD4lwCieVM8/FIemyLk uyAstS6OHP36FVPcFWsAs3tOMmjoH2hdscLr4On94WhZIUo58cGHS3kA8BS/JN44Srs+ 5lhUl8ZwCQQyykTC0kmmvz4o+Zu7vml/G7MCmUouwXDxwoZNNZQ6R7VS3LmjmZNGJ9KB 9+HyDpodmf8+ovWqjKm3PnKu+JV11QN1P0M35t1rxO+52C589/FO/sLUrVmbiZyVpJMA 
0hTqDbrwc9njee3XpdF9haML4mjC6lD5LwgPJP1tiKG9EIlHbsLjuoFgfvENSSxx6LNd y+zw== X-Gm-Message-State: AOJu0YzoRBNXe+zySNhZLcneBgpRy6Fj6y87eVoMTondyqm+jX/A/Jlv akDkQUrkA2FlVBO9b40ftmDcRA== X-Google-Smtp-Source: AGHT+IFh6LfV2/AHAYHwcVmb5knjA2/bR81yPy9NbPP1EcGcKj4+oF3XbkwuwDYo6+WKkRAR2PoSvg== X-Received: by 2002:a50:cd55:0:b0:553:2294:8170 with SMTP id d21-20020a50cd55000000b0055322948170mr3618843edj.11.1703068419559; Wed, 20 Dec 2023 02:33:39 -0800 (PST) Received: from jlinkes-PT-Latitude-5530.pantheon.local (81.89.53.154.host.vnet.sk. [81.89.53.154]) by smtp.gmail.com with ESMTPSA id bd18-20020a056402207200b00542db304680sm12588981edb.63.2023.12.20.02.33.37 (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); Wed, 20 Dec 2023 02:33:38 -0800 (PST) From: =?utf-8?q?Juraj_Linke=C5=A1?= To: thomas@monjalon.net, Honnappa.Nagarahalli@arm.com, jspewock@iol.unh.edu, probb@iol.unh.edu, paul.szczepanek@arm.com, yoan.picchi@foss.arm.com, Luca.Vizzarro@arm.com Cc: dev@dpdk.org, =?utf-8?q?Juraj_Linke=C5=A1?= Subject: [RFC PATCH v1 2/5] dts: move test suite execution logic to DTSRunner Date: Wed, 20 Dec 2023 11:33:28 +0100 Message-Id: <20231220103331.60888-3-juraj.linkes@pantheon.tech> X-Mailer: git-send-email 2.34.1 In-Reply-To: <20231220103331.60888-1-juraj.linkes@pantheon.tech> References: <20231220103331.60888-1-juraj.linkes@pantheon.tech> MIME-Version: 1.0 X-BeenThere: dev@dpdk.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: DPDK patches and discussions List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , Errors-To: dev-bounces@dpdk.org Move the code responsible for running the test suite from the TestSuite class to the DTSRunner class. This restructuring decision was made to consolidate and unify the related logic into a single unit. 
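To illustrate the scope of the move, a minimal, self-contained sketch of the resulting control flow follows. The names (ExampleSuite, ExampleRunner) and the trivial test-case discovery are stand-ins invented for this illustration, not the framework's actual API; they only show the shape of the orchestration once the runner, rather than the suite, drives setup, execution and teardown:

# Illustrative sketch only: simplified stand-ins, not the real DTS classes.
# The real DTSRunner additionally records results, re-runs failed test cases
# and raises BlockingTestSuiteError when a blocking suite fails.


class ExampleSuite:
    """After this change a suite only provides setup/teardown and test cases."""

    def set_up_suite(self) -> None:
        print("suite setup")

    def tear_down_suite(self) -> None:
        print("suite teardown")

    def test_example(self) -> None:
        print("test case body")


class ExampleRunner:
    """The runner owns the orchestration that used to live in TestSuite.run()."""

    def run_suite(self, suite: ExampleSuite) -> None:
        try:
            suite.set_up_suite()
            # Execute every method that looks like a test case.
            for name in dir(suite):
                if name.startswith("test_"):
                    getattr(suite, name)()
        finally:
            suite.tear_down_suite()


if __name__ == "__main__":
    ExampleRunner().run_suite(ExampleSuite())
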
Signed-off-by: Juraj Linkeš --- dts/framework/runner.py | 156 +++++++++++++++++++++++++++++++++--- dts/framework/test_suite.py | 134 +------------------------------ 2 files changed, 147 insertions(+), 143 deletions(-) diff --git a/dts/framework/runner.py b/dts/framework/runner.py index 5b077c5805..5e145a8066 100644 --- a/dts/framework/runner.py +++ b/dts/framework/runner.py @@ -5,6 +5,7 @@ import logging import sys +from types import MethodType from .config import ( BuildTargetConfiguration, @@ -12,10 +13,18 @@ ExecutionConfiguration, TestSuiteConfig, ) -from .exception import BlockingTestSuiteError +from .exception import BlockingTestSuiteError, SSHTimeoutError, TestCaseVerifyError from .logger import DTSLOG, getLogger -from .test_result import BuildTargetResult, DTSResult, ExecutionResult, Result -from .test_suite import get_test_suites +from .settings import SETTINGS +from .test_result import ( + BuildTargetResult, + DTSResult, + ExecutionResult, + Result, + TestCaseResult, + TestSuiteResult, +) +from .test_suite import TestSuite, get_test_suites from .testbed_model import SutNode, TGNode from .utils import check_dts_python_version @@ -148,7 +157,7 @@ def _run_build_target( build_target_result.update_setup(Result.FAIL, e) else: - self._run_all_suites(sut_node, tg_node, execution, build_target_result) + self._run_test_suites(sut_node, tg_node, execution, build_target_result) finally: try: @@ -158,7 +167,7 @@ def _run_build_target( self._logger.exception("Build target teardown failed.") build_target_result.update_teardown(Result.FAIL, e) - def _run_all_suites( + def _run_test_suites( self, sut_node: SutNode, tg_node: TGNode, @@ -175,7 +184,7 @@ def _run_all_suites( execution.test_suites[:0] = [TestSuiteConfig.from_dict("smoke_tests")] for test_suite_config in execution.test_suites: try: - self._run_single_suite( + self._run_test_suite( sut_node, tg_node, execution, build_target_result, test_suite_config ) except BlockingTestSuiteError as e: @@ -189,7 +198,7 @@ def _run_all_suites( if end_build_target: break - def _run_single_suite( + def _run_test_suite( self, sut_node: SutNode, tg_node: TGNode, @@ -198,6 +207,9 @@ def _run_single_suite( test_suite_config: TestSuiteConfig, ) -> None: """Runs a single test suite. + Setup, execute and teardown the whole suite. + Suite execution consists of running all test cases scheduled to be executed. + A test cast run consists of setup, execution and teardown of said test case. Args: sut_node: Node to run tests on. 
@@ -222,13 +234,131 @@ def _run_single_suite( else: for test_suite_class in test_suite_classes: test_suite = test_suite_class( - sut_node, - tg_node, - test_suite_config.test_cases, - execution.func, - build_target_result, + sut_node, tg_node, test_suite_config.test_cases + ) + + test_suite_name = test_suite.__class__.__name__ + test_suite_result = build_target_result.add_test_suite(test_suite_name) + try: + self._logger.info(f"Starting test suite setup: {test_suite_name}") + test_suite.set_up_suite() + test_suite_result.update_setup(Result.PASS) + self._logger.info(f"Test suite setup successful: {test_suite_name}") + except Exception as e: + self._logger.exception(f"Test suite setup ERROR: {test_suite_name}") + test_suite_result.update_setup(Result.ERROR, e) + + else: + self._execute_test_suite( + execution.func, test_suite, test_suite_result + ) + + finally: + try: + test_suite.tear_down_suite() + sut_node.kill_cleanup_dpdk_apps() + test_suite_result.update_teardown(Result.PASS) + except Exception as e: + self._logger.exception( + f"Test suite teardown ERROR: {test_suite_name}" + ) + self._logger.warning( + f"Test suite '{test_suite_name}' teardown failed, " + f"the next test suite may be affected." + ) + test_suite_result.update_setup(Result.ERROR, e) + if ( + len(test_suite_result.get_errors()) > 0 + and test_suite.is_blocking + ): + raise BlockingTestSuiteError(test_suite_name) + + def _execute_test_suite( + self, func: bool, test_suite: TestSuite, test_suite_result: TestSuiteResult + ) -> None: + """ + Execute all test cases scheduled to be executed in this suite. + """ + if func: + for test_case_method in test_suite._get_functional_test_cases(): + test_case_name = test_case_method.__name__ + test_case_result = test_suite_result.add_test_case(test_case_name) + all_attempts = SETTINGS.re_run + 1 + attempt_nr = 1 + self._run_test_case(test_suite, test_case_method, test_case_result) + while not test_case_result and attempt_nr < all_attempts: + attempt_nr += 1 + self._logger.info( + f"Re-running FAILED test case '{test_case_name}'. " + f"Attempt number {attempt_nr} out of {all_attempts}." + ) + self._run_test_case(test_suite, test_case_method, test_case_result) + + def _run_test_case( + self, + test_suite: TestSuite, + test_case_method: MethodType, + test_case_result: TestCaseResult, + ) -> None: + """ + Setup, execute and teardown a test case in this suite. + Exceptions are caught and recorded in logs and results. + """ + test_case_name = test_case_method.__name__ + + try: + # run set_up function for each case + test_suite.set_up_test_case() + test_case_result.update_setup(Result.PASS) + except SSHTimeoutError as e: + self._logger.exception(f"Test case setup FAILED: {test_case_name}") + test_case_result.update_setup(Result.FAIL, e) + except Exception as e: + self._logger.exception(f"Test case setup ERROR: {test_case_name}") + test_case_result.update_setup(Result.ERROR, e) + + else: + # run test case if setup was successful + self._execute_test_case(test_case_method, test_case_result) + + finally: + try: + test_suite.tear_down_test_case() + test_case_result.update_teardown(Result.PASS) + except Exception as e: + self._logger.exception(f"Test case teardown ERROR: {test_case_name}") + self._logger.warning( + f"Test case '{test_case_name}' teardown failed, " + f"the next test case may be affected." 
) - test_suite.run() + test_case_result.update_teardown(Result.ERROR, e) + test_case_result.update(Result.ERROR) + + def _execute_test_case( + self, test_case_method: MethodType, test_case_result: TestCaseResult + ) -> None: + """ + Execute one test case and handle failures. + """ + test_case_name = test_case_method.__name__ + try: + self._logger.info(f"Starting test case execution: {test_case_name}") + test_case_method() + test_case_result.update(Result.PASS) + self._logger.info(f"Test case execution PASSED: {test_case_name}") + + except TestCaseVerifyError as e: + self._logger.exception(f"Test case execution FAILED: {test_case_name}") + test_case_result.update(Result.FAIL, e) + except Exception as e: + self._logger.exception(f"Test case execution ERROR: {test_case_name}") + test_case_result.update(Result.ERROR, e) + except KeyboardInterrupt: + self._logger.error( + f"Test case execution INTERRUPTED by user: {test_case_name}" + ) + test_case_result.update(Result.SKIP) + raise KeyboardInterrupt("Stop DTS") def _exit_dts(self) -> None: """ diff --git a/dts/framework/test_suite.py b/dts/framework/test_suite.py index 4a7907ec33..e96305deb0 100644 --- a/dts/framework/test_suite.py +++ b/dts/framework/test_suite.py @@ -17,15 +17,9 @@ from scapy.layers.l2 import Ether # type: ignore[import] from scapy.packet import Packet, Padding # type: ignore[import] -from .exception import ( - BlockingTestSuiteError, - ConfigurationError, - SSHTimeoutError, - TestCaseVerifyError, -) +from .exception import ConfigurationError, TestCaseVerifyError from .logger import DTSLOG, getLogger from .settings import SETTINGS -from .test_result import BuildTargetResult, Result, TestCaseResult, TestSuiteResult from .testbed_model import SutNode, TGNode from .testbed_model.hw.port import Port, PortLink from .utils import get_packet_summaries @@ -50,11 +44,10 @@ class TestSuite(object): """ sut_node: SutNode + tg_node: TGNode is_blocking = False _logger: DTSLOG _test_cases_to_run: list[str] - _func: bool - _result: TestSuiteResult _port_links: list[PortLink] _sut_port_ingress: Port _sut_port_egress: Port @@ -69,17 +62,13 @@ def __init__( self, sut_node: SutNode, tg_node: TGNode, - test_cases: list[str], - func: bool, - build_target_result: BuildTargetResult, + test_cases_to_run: list[str], ): self.sut_node = sut_node self.tg_node = tg_node self._logger = getLogger(self.__class__.__name__) - self._test_cases_to_run = test_cases + self._test_cases_to_run = test_cases_to_run self._test_cases_to_run.extend(SETTINGS.test_cases) - self._func = func - self._result = build_target_result.add_test_suite(self.__class__.__name__) self._port_links = [] self._process_links() self._sut_port_ingress, self._tg_port_egress = ( @@ -280,60 +269,6 @@ def _verify_l3_packet(self, received_packet: IP, expected_packet: IP) -> bool: return False return True - def run(self) -> None: - """ - Setup, execute and teardown the whole suite. - Suite execution consists of running all test cases scheduled to be executed. - A test cast run consists of setup, execution and teardown of said test case. 
- """ - test_suite_name = self.__class__.__name__ - - try: - self._logger.info(f"Starting test suite setup: {test_suite_name}") - self.set_up_suite() - self._result.update_setup(Result.PASS) - self._logger.info(f"Test suite setup successful: {test_suite_name}") - except Exception as e: - self._logger.exception(f"Test suite setup ERROR: {test_suite_name}") - self._result.update_setup(Result.ERROR, e) - - else: - self._execute_test_suite() - - finally: - try: - self.tear_down_suite() - self.sut_node.kill_cleanup_dpdk_apps() - self._result.update_teardown(Result.PASS) - except Exception as e: - self._logger.exception(f"Test suite teardown ERROR: {test_suite_name}") - self._logger.warning( - f"Test suite '{test_suite_name}' teardown failed, " - f"the next test suite may be affected." - ) - self._result.update_setup(Result.ERROR, e) - if len(self._result.get_errors()) > 0 and self.is_blocking: - raise BlockingTestSuiteError(test_suite_name) - - def _execute_test_suite(self) -> None: - """ - Execute all test cases scheduled to be executed in this suite. - """ - if self._func: - for test_case_method in self._get_functional_test_cases(): - test_case_name = test_case_method.__name__ - test_case_result = self._result.add_test_case(test_case_name) - all_attempts = SETTINGS.re_run + 1 - attempt_nr = 1 - self._run_test_case(test_case_method, test_case_result) - while not test_case_result and attempt_nr < all_attempts: - attempt_nr += 1 - self._logger.info( - f"Re-running FAILED test case '{test_case_name}'. " - f"Attempt number {attempt_nr} out of {all_attempts}." - ) - self._run_test_case(test_case_method, test_case_result) - def _get_functional_test_cases(self) -> list[MethodType]: """ Get all functional test cases. @@ -363,67 +298,6 @@ def _should_be_executed(self, test_case_name: str, test_case_regex: str) -> bool return match - def _run_test_case( - self, test_case_method: MethodType, test_case_result: TestCaseResult - ) -> None: - """ - Setup, execute and teardown a test case in this suite. - Exceptions are caught and recorded in logs and results. - """ - test_case_name = test_case_method.__name__ - - try: - # run set_up function for each case - self.set_up_test_case() - test_case_result.update_setup(Result.PASS) - except SSHTimeoutError as e: - self._logger.exception(f"Test case setup FAILED: {test_case_name}") - test_case_result.update_setup(Result.FAIL, e) - except Exception as e: - self._logger.exception(f"Test case setup ERROR: {test_case_name}") - test_case_result.update_setup(Result.ERROR, e) - - else: - # run test case if setup was successful - self._execute_test_case(test_case_method, test_case_result) - - finally: - try: - self.tear_down_test_case() - test_case_result.update_teardown(Result.PASS) - except Exception as e: - self._logger.exception(f"Test case teardown ERROR: {test_case_name}") - self._logger.warning( - f"Test case '{test_case_name}' teardown failed, " - f"the next test case may be affected." - ) - test_case_result.update_teardown(Result.ERROR, e) - test_case_result.update(Result.ERROR) - - def _execute_test_case( - self, test_case_method: MethodType, test_case_result: TestCaseResult - ) -> None: - """ - Execute one test case and handle failures. 
- """ - test_case_name = test_case_method.__name__ - try: - self._logger.info(f"Starting test case execution: {test_case_name}") - test_case_method() - test_case_result.update(Result.PASS) - self._logger.info(f"Test case execution PASSED: {test_case_name}") - - except TestCaseVerifyError as e: - self._logger.exception(f"Test case execution FAILED: {test_case_name}") - test_case_result.update(Result.FAIL, e) - except Exception as e: - self._logger.exception(f"Test case execution ERROR: {test_case_name}") - test_case_result.update(Result.ERROR, e) - except KeyboardInterrupt: - self._logger.error(f"Test case execution INTERRUPTED by user: {test_case_name}") - test_case_result.update(Result.SKIP) - raise KeyboardInterrupt("Stop DTS") - def get_test_suites(testsuite_module_path: str) -> list[type[TestSuite]]: def is_test_suite(object) -> bool: From patchwork Wed Dec 20 10:33:29 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-Patchwork-Submitter: =?utf-8?q?Juraj_Linke=C5=A1?= X-Patchwork-Id: 135390 X-Patchwork-Delegate: thomas@monjalon.net Return-Path: X-Original-To: patchwork@inbox.dpdk.org Delivered-To: patchwork@inbox.dpdk.org Received: from mails.dpdk.org (mails.dpdk.org [217.70.189.124]) by inbox.dpdk.org (Postfix) with ESMTP id CE53C43723; Wed, 20 Dec 2023 11:34:01 +0100 (CET) Received: from mails.dpdk.org (localhost [127.0.0.1]) by mails.dpdk.org (Postfix) with ESMTP id 4985F42EA9; Wed, 20 Dec 2023 11:33:43 +0100 (CET) Received: from mail-ed1-f50.google.com (mail-ed1-f50.google.com [209.85.208.50]) by mails.dpdk.org (Postfix) with ESMTP id 2696342DDB for ; Wed, 20 Dec 2023 11:33:42 +0100 (CET) Received: by mail-ed1-f50.google.com with SMTP id 4fb4d7f45d1cf-55114c073b8so6921072a12.1 for ; Wed, 20 Dec 2023 02:33:42 -0800 (PST) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=pantheon.tech; s=google; t=1703068422; x=1703673222; darn=dpdk.org; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:from:to:cc:subject:date :message-id:reply-to; bh=sEgXVa20St1fiPJh8PLIZO86zHVPpnGhA21XCRjdwn0=; b=Z1qAg9rgtdIfcTr/xvgEHR2cl64rkISv14q+hM/9zrmMOdvfB8OXcsUeUk5hhd4wwZ LklIfOQFeloRRh62qKAziiwtiMT34+Pemy71HmXcXgs9H5fcwOlcxDQ9eQ6RAG9SivPx 1MjrLw63X30GcfbO/JMQT39n1F3CH9LvzV6ODhwYMAHNeXD9c8quWs+WEge5DFV1AEkA Zl5w2Kl91+e1Md6Xa5TzGHiyUfPcKGraDlItlpJ7ZO9ddaD8SlIKaI+kToc5fA9+0UGs ndcaXgVmd0phYvJTWpcew0P9Xmvfck0k3MT/uLAH0pkU295PzMJJzSkLiyKUeVMGe7F/ 4wFA== X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=1e100.net; s=20230601; t=1703068422; x=1703673222; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:x-gm-message-state:from:to:cc :subject:date:message-id:reply-to; bh=sEgXVa20St1fiPJh8PLIZO86zHVPpnGhA21XCRjdwn0=; b=WM0SM/q9mhh/Ri7B1mU1BUBrcxLgYpnAnmzYsqsToQUmgVmcMR6Wjxq5Uu2OUkJpdq dqndamDl80ctLutpe05o+DpU+JlKhFuhQgVWGsEXn0dMcAl4F4Vc5tY9+bq1QWHcnQSy 0p/T6ntgTMoOjEZjHJcpHIjgV52Mr9G9Tggt4YWaGghuWTDLZcma3gsNYqP6WXztLmdy /RRYwpQ4fa00Y/P3O1ID4kC4rRZGnY3sf1U8QiDSz+5Ew/+T5RvfmLnSE0W8OmKgzB6Z 8ZG8eUW3w+8FITW8wetK7za6N5fKl2lcsTw6RKvLZF4R47Lyx2ruQHA8VDfX9OM+gVNM dYZA== X-Gm-Message-State: AOJu0YzkLpI10QEBdJN5O10KcpDRkoa0cS1pRigQsvTwjadHpBQi3rR6 EUlW1U3SmWgdaFW+EVg3UC8OAQ== X-Google-Smtp-Source: AGHT+IHLwYrLoV5+HmxKwlWknoVQdp/n+Zr633VXLp9oFyBtq9b7TXU36ZtZR2ZvWlYKMXrbrKcvMg== X-Received: by 2002:a50:9546:0:b0:552:84bb:c124 with SMTP id v6-20020a509546000000b0055284bbc124mr3538066eda.86.1703068421720; Wed, 20 Dec 2023 02:33:41 -0800 (PST) Received: from 
jlinkes-PT-Latitude-5530.pantheon.local (81.89.53.154.host.vnet.sk. [81.89.53.154]) by smtp.gmail.com with ESMTPSA id bd18-20020a056402207200b00542db304680sm12588981edb.63.2023.12.20.02.33.39 (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); Wed, 20 Dec 2023 02:33:40 -0800 (PST) From: =?utf-8?q?Juraj_Linke=C5=A1?= To: thomas@monjalon.net, Honnappa.Nagarahalli@arm.com, jspewock@iol.unh.edu, probb@iol.unh.edu, paul.szczepanek@arm.com, yoan.picchi@foss.arm.com, Luca.Vizzarro@arm.com Cc: dev@dpdk.org, =?utf-8?q?Juraj_Linke=C5=A1?= Subject: [RFC PATCH v1 3/5] dts: process test suites at the beginning of run Date: Wed, 20 Dec 2023 11:33:29 +0100 Message-Id: <20231220103331.60888-4-juraj.linkes@pantheon.tech> X-Mailer: git-send-email 2.34.1 In-Reply-To: <20231220103331.60888-1-juraj.linkes@pantheon.tech> References: <20231220103331.60888-1-juraj.linkes@pantheon.tech> MIME-Version: 1.0 X-BeenThere: dev@dpdk.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: DPDK patches and discussions List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , Errors-To: dev-bounces@dpdk.org We initialize test suite/case objects at the start of the program and store them with custom execution config (add test case names) in Execution. This change helps identify errors with test suites earlier, and we have access to the right data when programs crash earlier. Signed-off-by: Juraj Linkeš --- dts/framework/config/__init__.py | 5 +- dts/framework/runner.py | 309 ++++++++++++++++++++--------- dts/framework/test_suite.py | 59 +----- dts/tests/TestSuite_smoke_tests.py | 2 +- 4 files changed, 217 insertions(+), 158 deletions(-) diff --git a/dts/framework/config/__init__.py b/dts/framework/config/__init__.py index 497847afb9..d65ac625f8 100644 --- a/dts/framework/config/__init__.py +++ b/dts/framework/config/__init__.py @@ -238,7 +238,6 @@ class ExecutionConfiguration: system_under_test_node: SutNodeConfiguration traffic_generator_node: TGNodeConfiguration vdevs: list[str] - skip_smoke_tests: bool @staticmethod def from_dict( @@ -247,9 +246,10 @@ def from_dict( build_targets: list[BuildTargetConfiguration] = list( map(BuildTargetConfiguration.from_dict, d["build_targets"]) ) + if not d.get("skip_smoke_tests", False): + d["test_suites"].insert(0, "smoke_tests") test_suites: list[TestSuiteConfig] = list(map(TestSuiteConfig.from_dict, d["test_suites"])) sut_name = d["system_under_test_node"]["node_name"] - skip_smoke_tests = d.get("skip_smoke_tests", False) assert sut_name in node_map, f"Unknown SUT {sut_name} in execution {d}" system_under_test_node = node_map[sut_name] assert isinstance( @@ -270,7 +270,6 @@ def from_dict( build_targets=build_targets, perf=d["perf"], func=d["func"], - skip_smoke_tests=skip_smoke_tests, test_suites=test_suites, system_under_test_node=system_under_test_node, traffic_generator_node=traffic_generator_node, diff --git a/dts/framework/runner.py b/dts/framework/runner.py index 5e145a8066..acc3342f0c 100644 --- a/dts/framework/runner.py +++ b/dts/framework/runner.py @@ -3,9 +3,14 @@ # Copyright(c) 2022-2023 PANTHEON.tech s.r.o. 
# Copyright(c) 2022-2023 University of New Hampshire +import importlib +import inspect import logging +import re import sys -from types import MethodType +from copy import deepcopy +from dataclasses import dataclass +from types import MethodType, ModuleType from .config import ( BuildTargetConfiguration, @@ -13,7 +18,12 @@ ExecutionConfiguration, TestSuiteConfig, ) -from .exception import BlockingTestSuiteError, SSHTimeoutError, TestCaseVerifyError +from .exception import ( + BlockingTestSuiteError, + ConfigurationError, + SSHTimeoutError, + TestCaseVerifyError, +) from .logger import DTSLOG, getLogger from .settings import SETTINGS from .test_result import ( @@ -24,25 +34,55 @@ TestCaseResult, TestSuiteResult, ) -from .test_suite import TestSuite, get_test_suites +from .test_suite import TestSuite from .testbed_model import SutNode, TGNode from .utils import check_dts_python_version +@dataclass +class TestSuiteSetup: + test_suite: type[TestSuite] + test_cases: list[MethodType] + + def processed_config(self) -> TestSuiteConfig: + return TestSuiteConfig( + test_suite=self.test_suite.__name__, + test_cases=[test_case.__name__ for test_case in self.test_cases], + ) + + +@dataclass +class Execution: + config: ExecutionConfiguration + test_suite_setups: list[TestSuiteSetup] + + def processed_config(self) -> ExecutionConfiguration: + """ + Creating copy of execution config witch add test-case names. + """ + modified_execution_config = deepcopy(self.config) + modified_execution_config.test_suites[:] = [ + test_suite.processed_config() for test_suite in self.test_suite_setups + ] + return modified_execution_config + + class DTSRunner: _logger: DTSLOG _result: DTSResult - _configuration: Configuration + _executions: list[Execution] def __init__(self, configuration: Configuration): self._logger = getLogger("DTSRunner") self._result = DTSResult(self._logger) - self._configuration = configuration + self._executions = create_executions(configuration.executions) def run(self): """ The main process of DTS. Runs all build targets in all executions from the main config file. + Suite execution consists of running all test cases scheduled to be executed. + A test case run consists of setup, execution and teardown of said test case. """ # check the python version of the server that run dts check_dts_python_version() @@ -50,22 +90,22 @@ def run(self): tg_nodes: dict[str, TGNode] = {} try: # for all Execution sections - for execution in self._configuration.executions: - sut_node = sut_nodes.get(execution.system_under_test_node.name) - tg_node = tg_nodes.get(execution.traffic_generator_node.name) + for execution in self._executions: + sut_node = sut_nodes.get(execution.config.system_under_test_node.name) + tg_node = tg_nodes.get(execution.config.traffic_generator_node.name) try: if not sut_node: - sut_node = SutNode(execution.system_under_test_node) + sut_node = SutNode(execution.config.system_under_test_node) sut_nodes[sut_node.name] = sut_node if not tg_node: - tg_node = TGNode(execution.traffic_generator_node) + tg_node = TGNode(execution.config.traffic_generator_node) tg_nodes[tg_node.name] = tg_node self._result.update_setup(Result.PASS) except Exception as e: - failed_node = execution.system_under_test_node.name + failed_node = execution.config.system_under_test_node.name if sut_node: - failed_node = execution.traffic_generator_node.name + failed_node = execution.config.traffic_generator_node.name self._logger.exception( f"The Creation of node {failed_node} failed." 
) @@ -100,29 +140,34 @@ def _run_execution( self, sut_node: SutNode, tg_node: TGNode, - execution: ExecutionConfiguration, + execution: Execution, ) -> None: """ Run the given execution. This involves running the execution setup as well as running all build targets in the given execution. """ self._logger.info( - f"Running execution with SUT '{execution.system_under_test_node.name}'." + "Running execution with SUT " + f"'{execution.config.system_under_test_node.name}'." ) execution_result = self._result.add_execution(sut_node.config) execution_result.add_sut_info(sut_node.node_info) try: - sut_node.set_up_execution(execution) + sut_node.set_up_execution(execution.config) execution_result.update_setup(Result.PASS) except Exception as e: self._logger.exception("Execution setup failed.") execution_result.update_setup(Result.FAIL, e) else: - for build_target in execution.build_targets: + for build_target in execution.config.build_targets: self._run_build_target( - sut_node, tg_node, build_target, execution, execution_result + sut_node, + tg_node, + build_target, + execution, + execution_result, ) finally: @@ -138,7 +183,7 @@ def _run_build_target( sut_node: SutNode, tg_node: TGNode, build_target: BuildTargetConfiguration, - execution: ExecutionConfiguration, + execution: Execution, execution_result: ExecutionResult, ) -> None: """ @@ -171,7 +216,7 @@ def _run_test_suites( self, sut_node: SutNode, tg_node: TGNode, - execution: ExecutionConfiguration, + execution: Execution, build_target_result: BuildTargetResult, ) -> None: """ @@ -180,16 +225,18 @@ def _run_test_suites( If no subset is specified, run all test cases. """ end_build_target = False - if not execution.skip_smoke_tests: - execution.test_suites[:0] = [TestSuiteConfig.from_dict("smoke_tests")] - for test_suite_config in execution.test_suites: + for test_suite_setup in execution.test_suite_setups: try: self._run_test_suite( - sut_node, tg_node, execution, build_target_result, test_suite_config + sut_node, + tg_node, + test_suite_setup, + build_target_result, ) except BlockingTestSuiteError as e: self._logger.exception( - f"An error occurred within {test_suite_config.test_suite}. " + "An error occurred within " + f"{test_suite_setup.test_suite.__name__}. " "Skipping build target..." ) self._result.add_error(e) @@ -202,14 +249,10 @@ def _run_test_suite( self, sut_node: SutNode, tg_node: TGNode, - execution: ExecutionConfiguration, + test_suite_setup: TestSuiteSetup, build_target_result: BuildTargetResult, - test_suite_config: TestSuiteConfig, ) -> None: """Runs a single test suite. - Setup, execute and teardown the whole suite. - Suite execution consists of running all test cases scheduled to be executed. - A test cast run consists of setup, execution and teardown of said test case. Args: sut_node: Node to run tests on. @@ -220,84 +263,67 @@ def _run_test_suite( Raises: BlockingTestSuiteError: If a test suite that was marked as blocking fails. """ + test_suite = test_suite_setup.test_suite(sut_node, tg_node) + test_suite_name = test_suite_setup.test_suite.__name__ + test_suite_result = build_target_result.add_test_suite(test_suite_name) try: - full_suite_path = f"tests.TestSuite_{test_suite_config.test_suite}" - test_suite_classes = get_test_suites(full_suite_path) - suites_str = ", ".join((x.__name__ for x in test_suite_classes)) - self._logger.debug( - f"Found test suites '{suites_str}' in '{full_suite_path}'." 
- ) + self._logger.info(f"Starting test suite setup: {test_suite_name}") + test_suite.set_up_suite() + test_suite_result.update_setup(Result.PASS) + self._logger.info(f"Test suite setup successful: {test_suite_name}") except Exception as e: - self._logger.exception("An error occurred when searching for test suites.") - self._result.update_setup(Result.ERROR, e) + self._logger.exception(f"Test suite setup ERROR: {test_suite_name}") + test_suite_result.update_setup(Result.ERROR, e) else: - for test_suite_class in test_suite_classes: - test_suite = test_suite_class( - sut_node, tg_node, test_suite_config.test_cases - ) - - test_suite_name = test_suite.__class__.__name__ - test_suite_result = build_target_result.add_test_suite(test_suite_name) - try: - self._logger.info(f"Starting test suite setup: {test_suite_name}") - test_suite.set_up_suite() - test_suite_result.update_setup(Result.PASS) - self._logger.info(f"Test suite setup successful: {test_suite_name}") - except Exception as e: - self._logger.exception(f"Test suite setup ERROR: {test_suite_name}") - test_suite_result.update_setup(Result.ERROR, e) - - else: - self._execute_test_suite( - execution.func, test_suite, test_suite_result - ) + self._execute_test_suite( + test_suite, + test_suite_setup.test_cases, + test_suite_result, + ) - finally: - try: - test_suite.tear_down_suite() - sut_node.kill_cleanup_dpdk_apps() - test_suite_result.update_teardown(Result.PASS) - except Exception as e: - self._logger.exception( - f"Test suite teardown ERROR: {test_suite_name}" - ) - self._logger.warning( - f"Test suite '{test_suite_name}' teardown failed, " - f"the next test suite may be affected." - ) - test_suite_result.update_setup(Result.ERROR, e) - if ( - len(test_suite_result.get_errors()) > 0 - and test_suite.is_blocking - ): - raise BlockingTestSuiteError(test_suite_name) + finally: + try: + test_suite.tear_down_suite() + sut_node.kill_cleanup_dpdk_apps() + test_suite_result.update_teardown(Result.PASS) + except Exception as e: + self._logger.exception(f"Test suite teardown ERROR: {test_suite_name}") + self._logger.warning( + f"Test suite '{test_suite_name}' teardown failed, " + "the next test suite may be affected." + ) + test_suite_result.update_setup(Result.ERROR, e) + if len(test_suite_result.get_errors()) > 0 and test_suite.is_blocking: + raise BlockingTestSuiteError(test_suite_name) def _execute_test_suite( - self, func: bool, test_suite: TestSuite, test_suite_result: TestSuiteResult + self, + test_suite: TestSuite, + test_cases: list[MethodType], + test_suite_result: TestSuiteResult, ) -> None: """ Execute all test cases scheduled to be executed in this suite. """ - if func: - for test_case_method in test_suite._get_functional_test_cases(): - test_case_name = test_case_method.__name__ - test_case_result = test_suite_result.add_test_case(test_case_name) - all_attempts = SETTINGS.re_run + 1 - attempt_nr = 1 - self._run_test_case(test_suite, test_case_method, test_case_result) - while not test_case_result and attempt_nr < all_attempts: - attempt_nr += 1 - self._logger.info( - f"Re-running FAILED test case '{test_case_name}'. " - f"Attempt number {attempt_nr} out of {all_attempts}." 
- ) - self._run_test_case(test_suite, test_case_method, test_case_result) + for test_case_method in test_cases: + test_case_name = test_case_method.__name__ + test_case_result = test_suite_result.add_test_case(test_case_name) + all_attempts = SETTINGS.re_run + 1 + attempt_nr = 1 + self._run_test_case(test_case_method, test_suite, test_case_result) + while not test_case_result and attempt_nr < all_attempts: + attempt_nr += 1 + self._logger.info( + f"Re-running FAILED test case '{test_case_name}'. " + f"Attempt number {attempt_nr} out of {all_attempts}." + ) + self._run_test_case(test_case_method, test_suite, test_case_result) def _run_test_case( self, - test_suite: TestSuite, test_case_method: MethodType, + test_suite: TestSuite, test_case_result: TestCaseResult, ) -> None: """ @@ -305,7 +331,6 @@ def _run_test_case( Exceptions are caught and recorded in logs and results. """ test_case_name = test_case_method.__name__ - try: # run set_up function for each case test_suite.set_up_test_case() @@ -319,7 +344,7 @@ def _run_test_case( else: # run test case if setup was successful - self._execute_test_case(test_case_method, test_case_result) + self._execute_test_case(test_case_method, test_suite, test_case_result) finally: try: @@ -335,7 +360,10 @@ def _run_test_case( test_case_result.update(Result.ERROR) def _execute_test_case( - self, test_case_method: MethodType, test_case_result: TestCaseResult + self, + test_case_method: MethodType, + test_suite: TestSuite, + test_case_result: TestCaseResult, ) -> None: """ Execute one test case and handle failures. @@ -343,7 +371,7 @@ def _execute_test_case( test_case_name = test_case_method.__name__ try: self._logger.info(f"Starting test case execution: {test_case_name}") - test_case_method() + test_case_method(test_suite) test_case_result.update(Result.PASS) self._logger.info(f"Test case execution PASSED: {test_case_name}") @@ -371,3 +399,92 @@ def _exit_dts(self) -> None: logging.shutdown() sys.exit(self._result.get_return_code()) + + +def create_executions( + execution_configs: list[ExecutionConfiguration], +) -> list[Execution]: + executions: list[Execution] = [] + for execution_config in execution_configs: + test_suite_setups: list[TestSuiteSetup] = [] + + for test_suite_config in execution_config.test_suites: + testsuite_module_path = f"tests.TestSuite_{test_suite_config.test_suite}" + try: + suite_module = importlib.import_module(testsuite_module_path) + except ModuleNotFoundError as e: + raise ConfigurationError( + f"Test suite '{testsuite_module_path}' not found." 
+ ) from e + + test_suite = _get_suite_class(suite_module, test_suite_config.test_suite) + + test_cases_to_run = test_suite_config.test_cases + test_cases_to_run.extend(SETTINGS.test_cases) + + test_cases = [] + if execution_config.func: + # add functional test cases + test_cases.extend( + _get_test_cases(test_suite, r"test_(?!perf_)", test_cases_to_run) + ) + + if execution_config.perf: + # add performance test cases + test_cases.extend( + _get_test_cases(test_suite, r"test_perf_", test_cases_to_run) + ) + + test_suite_setups.append( + TestSuiteSetup(test_suite=test_suite, test_cases=test_cases) + ) + + executions.append( + Execution( + config=execution_config, + test_suite_setups=test_suite_setups, + ) + ) + + return executions + + +def _get_suite_class(suite_module: ModuleType, suite_name: str) -> type[TestSuite]: + def is_test_suite(object) -> bool: + try: + if issubclass(object, TestSuite) and object is not TestSuite: + return True + except TypeError: + return False + return False + + suite_name_regex = suite_name.replace("_", "").lower() + for class_name, suite_class in inspect.getmembers(suite_module, is_test_suite): + if not class_name.startswith("Test"): + continue + + if suite_name_regex == class_name[4:].lower(): + return suite_class + raise ConfigurationError( + f"Cannot find valid test suite in {suite_module.__name__}." + ) + + +def _get_test_cases( + suite_class: type[TestSuite], test_case_regex: str, test_cases_to_run: list[str] +) -> list[MethodType]: + def should_be_executed(test_case_name: str) -> bool: + match = bool(re.match(test_case_regex, test_case_name)) + if test_cases_to_run: + return match and test_case_name in test_cases_to_run + + return match + + test_cases = [] + for test_case_name, test_case_method in inspect.getmembers( + suite_class, inspect.isfunction + ): + if should_be_executed(test_case_name): + test_cases.append(test_case_method) + + return test_cases diff --git a/dts/framework/test_suite.py b/dts/framework/test_suite.py index e96305deb0..e73206993d 100644 --- a/dts/framework/test_suite.py +++ b/dts/framework/test_suite.py @@ -6,20 +6,15 @@ Base class for creating DTS test cases. 
""" -import importlib -import inspect -import re from ipaddress import IPv4Interface, IPv6Interface, ip_interface -from types import MethodType from typing import Union from scapy.layers.inet import IP # type: ignore[import] from scapy.layers.l2 import Ether # type: ignore[import] from scapy.packet import Packet, Padding # type: ignore[import] -from .exception import ConfigurationError, TestCaseVerifyError +from .exception import TestCaseVerifyError from .logger import DTSLOG, getLogger -from .settings import SETTINGS from .testbed_model import SutNode, TGNode from .testbed_model.hw.port import Port, PortLink from .utils import get_packet_summaries @@ -47,7 +42,6 @@ class TestSuite(object): tg_node: TGNode is_blocking = False _logger: DTSLOG - _test_cases_to_run: list[str] _port_links: list[PortLink] _sut_port_ingress: Port _sut_port_egress: Port @@ -62,13 +56,10 @@ def __init__( self, sut_node: SutNode, tg_node: TGNode, - test_cases_to_run: list[str], ): self.sut_node = sut_node self.tg_node = tg_node self._logger = getLogger(self.__class__.__name__) - self._test_cases_to_run = test_cases_to_run - self._test_cases_to_run.extend(SETTINGS.test_cases) self._port_links = [] self._process_links() self._sut_port_ingress, self._tg_port_egress = ( @@ -268,51 +259,3 @@ def _verify_l3_packet(self, received_packet: IP, expected_packet: IP) -> bool: if received_packet.src != expected_packet.src or received_packet.dst != expected_packet.dst: return False return True - - def _get_functional_test_cases(self) -> list[MethodType]: - """ - Get all functional test cases. - """ - return self._get_test_cases(r"test_(?!perf_)") - - def _get_test_cases(self, test_case_regex: str) -> list[MethodType]: - """ - Return a list of test cases matching test_case_regex. - """ - self._logger.debug(f"Searching for test cases in {self.__class__.__name__}.") - filtered_test_cases = [] - for test_case_name, test_case in inspect.getmembers(self, inspect.ismethod): - if self._should_be_executed(test_case_name, test_case_regex): - filtered_test_cases.append(test_case) - cases_str = ", ".join((x.__name__ for x in filtered_test_cases)) - self._logger.debug(f"Found test cases '{cases_str}' in {self.__class__.__name__}.") - return filtered_test_cases - - def _should_be_executed(self, test_case_name: str, test_case_regex: str) -> bool: - """ - Check whether the test case should be executed. 
- """ - match = bool(re.match(test_case_regex, test_case_name)) - if self._test_cases_to_run: - return match and test_case_name in self._test_cases_to_run - - return match - - -def get_test_suites(testsuite_module_path: str) -> list[type[TestSuite]]: - def is_test_suite(object) -> bool: - try: - if issubclass(object, TestSuite) and object is not TestSuite: - return True - except TypeError: - return False - return False - - try: - testcase_module = importlib.import_module(testsuite_module_path) - except ModuleNotFoundError as e: - raise ConfigurationError(f"Test suite '{testsuite_module_path}' not found.") from e - return [ - test_suite_class - for _, test_suite_class in inspect.getmembers(testcase_module, is_test_suite) - ] diff --git a/dts/tests/TestSuite_smoke_tests.py b/dts/tests/TestSuite_smoke_tests.py index 8958f58dac..aa4bae5b17 100644 --- a/dts/tests/TestSuite_smoke_tests.py +++ b/dts/tests/TestSuite_smoke_tests.py @@ -10,7 +10,7 @@ from framework.utils import REGEX_FOR_PCI_ADDRESS -class SmokeTests(TestSuite): +class TestSmokeTests(TestSuite): is_blocking = True # dicts in this list are expected to have two keys: # "pci_address" and "current_driver" From patchwork Wed Dec 20 10:33:30 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-Patchwork-Submitter: =?utf-8?q?Juraj_Linke=C5=A1?= X-Patchwork-Id: 135391 X-Patchwork-Delegate: thomas@monjalon.net Return-Path: X-Original-To: patchwork@inbox.dpdk.org Delivered-To: patchwork@inbox.dpdk.org Received: from mails.dpdk.org (mails.dpdk.org [217.70.189.124]) by inbox.dpdk.org (Postfix) with ESMTP id 5F8C743723; Wed, 20 Dec 2023 11:34:09 +0100 (CET) Received: from mails.dpdk.org (localhost [127.0.0.1]) by mails.dpdk.org (Postfix) with ESMTP id 7E91542EAC; Wed, 20 Dec 2023 11:33:44 +0100 (CET) Received: from mail-ed1-f50.google.com (mail-ed1-f50.google.com [209.85.208.50]) by mails.dpdk.org (Postfix) with ESMTP id D495742EAC for ; Wed, 20 Dec 2023 11:33:43 +0100 (CET) Received: by mail-ed1-f50.google.com with SMTP id 4fb4d7f45d1cf-54cd2281ccbso6662749a12.2 for ; Wed, 20 Dec 2023 02:33:43 -0800 (PST) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=pantheon.tech; s=google; t=1703068423; x=1703673223; darn=dpdk.org; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:from:to:cc:subject:date :message-id:reply-to; bh=C8I+tzUYCstQOXtLA4opXJHZXqBE8ATIFlqtXbIH0EA=; b=ftOiS19khzB0l4vESrb87pm9ae+SMD/p/VCKi/yzQ2hETLG2krUS1rJ2O2RQiLYHjg WWlityPJ9KMUQpJjQtOHuZV4dgW9P3FRKsTJu0zDEyo4QsMLHMfGAVKVsdwVZ/PNn33b CCsfIYu7kSMyBmqxcbj8zg27wXjOFOwkuhXwBcIOLqEUr7tEuS34TMYKAY2hSQxqUxrO bCV2jlrblp/aYMPC5gNjlILKj9nski4kHDD+x/v6RGfHp00fW98C31fYSpJFTGqFfYJY 2n2nFsbpydDurfr3LCvKRuLEmEis6+dmYpB3EVrO0DnEJhCswtM8qNMpyO4/lDzxulc/ mD4Q== X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=1e100.net; s=20230601; t=1703068423; x=1703673223; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:x-gm-message-state:from:to:cc :subject:date:message-id:reply-to; bh=C8I+tzUYCstQOXtLA4opXJHZXqBE8ATIFlqtXbIH0EA=; b=tL47M2cdX7hv7tixRe8a1OWhY0g1SHF5ebXwMiio5I7H44My+u1eD4gfJLiz9E4ejQ UNLCnNjAT1Ss1H48t31JvLaRLFGM2lRwUwokgKEJ9IzYU+0cgZ0rkbgoDdVOHXtknEBm EZMnNqitbbyvmoHV/7CJ30wT1/bwmW9bBqwSEYHpipk7fJOh4K8ruwazhC9jfhOek6Ya yy+yozgrJaCaMLhvq8HnVyDyInr240vJTo9WeeM520PA55GEFLuK4R0O0S6fu00yEKAH rdon2T7QVI5WXyhcvQeCOoBd9c9DxwDWLzPAzriZoWTapCx5CpX66cf1SdqfHvlxH3ey W86Q== X-Gm-Message-State: 
AOJu0Yy+Sa6Xcy8JHKHXSp3IiPnHzV/bVyju13Yn9Fe4eK1zrj/WYhFt N+BgMzbVL/HJT5FF0W4RmrB3NrWIMt4nMZHe+umWlw== X-Google-Smtp-Source: AGHT+IGSbhSKO2xqaSdbX5J3uTjjQ6IqD3qR+EC16UBS+IFXBtIQbTj6+Ayvlw2vXJ4Tzme2s5HN+A== X-Received: by 2002:a50:c8c2:0:b0:551:edec:a91d with SMTP id k2-20020a50c8c2000000b00551edeca91dmr6089535edh.82.1703068423430; Wed, 20 Dec 2023 02:33:43 -0800 (PST) Received: from jlinkes-PT-Latitude-5530.pantheon.local (81.89.53.154.host.vnet.sk. [81.89.53.154]) by smtp.gmail.com with ESMTPSA id bd18-20020a056402207200b00542db304680sm12588981edb.63.2023.12.20.02.33.41 (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); Wed, 20 Dec 2023 02:33:42 -0800 (PST)
From: =?utf-8?q?Juraj_Linke=C5=A1?= To: thomas@monjalon.net, Honnappa.Nagarahalli@arm.com, jspewock@iol.unh.edu, probb@iol.unh.edu, paul.szczepanek@arm.com, yoan.picchi@foss.arm.com, Luca.Vizzarro@arm.com Cc: dev@dpdk.org, =?utf-8?q?Juraj_Linke=C5=A1?= Subject: [RFC PATCH v1 4/5] dts: block all testcases when earlier setup fails Date: Wed, 20 Dec 2023 11:33:30 +0100 Message-Id: <20231220103331.60888-5-juraj.linkes@pantheon.tech> X-Mailer: git-send-email 2.34.1 In-Reply-To: <20231220103331.60888-1-juraj.linkes@pantheon.tech> References: <20231220103331.60888-1-juraj.linkes@pantheon.tech> MIME-Version: 1.0 X-BeenThere: dev@dpdk.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: DPDK patches and discussions List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , Errors-To: dev-bounces@dpdk.org
In case of a failure during execution, build target or test suite setup, the affected test case results will be recorded as blocked. We also unify the various add_* methods into a single add_child_result method to make them more consistent. Each result now stores its corresponding config, along with its child configs and a reference to its parent result.
Signed-off-by: Juraj Linkeš --- dts/framework/runner.py | 12 +- dts/framework/test_result.py | 361 ++++++++++++++++++++--------------- 2 files changed, 216 insertions(+), 157 deletions(-)
diff --git a/dts/framework/runner.py b/dts/framework/runner.py index acc3342f0c..28570d4a1c 100644 --- a/dts/framework/runner.py +++ b/dts/framework/runner.py @@ -74,7 +74,7 @@ class DTSRunner: def __init__(self, configuration: Configuration): self._logger = getLogger("DTSRunner") - self._result = DTSResult(self._logger) + self._result = DTSResult(configuration, self._logger) self._executions = create_executions(configuration.executions) def run(self): @@ -150,7 +150,7 @@ def _run_execution( "Running execution with SUT " f"'{execution.config.system_under_test_node.name}'." ) - execution_result = self._result.add_execution(sut_node.config) + execution_result = self._result.add_child_result(execution.processed_config()) execution_result.add_sut_info(sut_node.node_info) try: @@ -190,7 +190,7 @@ def _run_build_target( Run the given build target.
""" self._logger.info(f"Running build target '{build_target.name}'.") - build_target_result = execution_result.add_build_target(build_target) + build_target_result = execution_result.add_child_result(build_target) try: sut_node.set_up_build_target(build_target) @@ -265,7 +265,9 @@ def _run_test_suite( """ test_suite = test_suite_setup.test_suite(sut_node, tg_node) test_suite_name = test_suite_setup.test_suite.__name__ - test_suite_result = build_target_result.add_test_suite(test_suite_name) + test_suite_result = build_target_result.add_child_result( + test_suite_setup.processed_config() + ) try: self._logger.info(f"Starting test suite setup: {test_suite_name}") test_suite.set_up_suite() @@ -308,7 +310,7 @@ def _execute_test_suite( """ for test_case_method in test_cases: test_case_name = test_case_method.__name__ - test_case_result = test_suite_result.add_test_case(test_case_name) + test_case_result = test_suite_result.add_child_result(test_case_name) all_attempts = SETTINGS.re_run + 1 attempt_nr = 1 self._run_test_case(test_case_method, test_suite, test_case_result) diff --git a/dts/framework/test_result.py b/dts/framework/test_result.py index 4c2e7e2418..dba2c55d36 100644 --- a/dts/framework/test_result.py +++ b/dts/framework/test_result.py @@ -9,6 +9,7 @@ import os.path from collections.abc import MutableSequence from enum import Enum, auto +from typing import Any, Union from .config import ( OS, @@ -16,9 +17,11 @@ BuildTargetConfiguration, BuildTargetInfo, Compiler, + Configuration, CPUType, - NodeConfiguration, + ExecutionConfiguration, NodeInfo, + TestSuiteConfig, ) from .exception import DTSError, ErrorSeverity from .logger import DTSLOG @@ -35,6 +38,7 @@ class Result(Enum): FAIL = auto() ERROR = auto() SKIP = auto() + BLOCK = auto() def __bool__(self) -> bool: return self is self.PASS @@ -63,42 +67,6 @@ def __bool__(self) -> bool: return bool(self.result) -class Statistics(dict): - """ - A helper class used to store the number of test cases by its result - along a few other basic information. - Using a dict provides a convenient way to format the data. - """ - - def __init__(self, dpdk_version: str | None): - super(Statistics, self).__init__() - for result in Result: - self[result.name] = 0 - self["PASS RATE"] = 0.0 - self["DPDK VERSION"] = dpdk_version - - def __iadd__(self, other: Result) -> "Statistics": - """ - Add a Result to the final count. - """ - self[other.name] += 1 - self["PASS RATE"] = ( - float(self[Result.PASS.name]) * 100 / sum(self[result.name] for result in Result) - ) - return self - - def __str__(self) -> str: - """ - Provide a string representation of the data. - """ - stats_str = "" - for key, value in self.items(): - stats_str += f"{key:<12} = {value}\n" - # according to docs, we should use \n when writing to text files - # on all platforms - return stats_str - - class BaseResult(object): """ The Base class for all results. 
Stores the results of @@ -109,6 +77,12 @@ class BaseResult(object): setup_result: FixtureResult teardown_result: FixtureResult _inner_results: MutableSequence["BaseResult"] + _child_configs: Union[ + list[ExecutionConfiguration], + list[BuildTargetConfiguration], + list[TestSuiteConfig], + list[str], + ] def __init__(self): self.setup_result = FixtureResult() @@ -119,6 +93,23 @@ def update_setup(self, result: Result, error: Exception | None = None) -> None: self.setup_result.result = result self.setup_result.error = error + if result in [Result.BLOCK, Result.ERROR, Result.FAIL]: + for child_config in self._child_configs: + child_result = self.add_child_result(child_config) + child_result.block() + + def add_child_result(self, config: Any) -> "BaseResult": + """ + Adding corresponding result for each classes. + """ + + def block(self): + """ + Mark the result as block on corresponding classes. + """ + self.update_setup(Result.BLOCK) + self.update_teardown(Result.BLOCK) + def update_teardown(self, result: Result, error: Exception | None = None) -> None: self.teardown_result.result = result self.teardown_result.error = error @@ -139,119 +130,11 @@ def _get_inner_errors(self) -> list[Exception]: def get_errors(self) -> list[Exception]: return self._get_setup_teardown_errors() + self._get_inner_errors() - def add_stats(self, statistics: Statistics) -> None: + def add_stats(self, statistics: "Statistics") -> None: for inner_result in self._inner_results: inner_result.add_stats(statistics) -class TestCaseResult(BaseResult, FixtureResult): - """ - The test case specific result. - Stores the result of the actual test case. - Also stores the test case name. - """ - - test_case_name: str - - def __init__(self, test_case_name: str): - super(TestCaseResult, self).__init__() - self.test_case_name = test_case_name - - def update(self, result: Result, error: Exception | None = None) -> None: - self.result = result - self.error = error - - def _get_inner_errors(self) -> list[Exception]: - if self.error: - return [self.error] - return [] - - def add_stats(self, statistics: Statistics) -> None: - statistics += self.result - - def __bool__(self) -> bool: - return bool(self.setup_result) and bool(self.teardown_result) and bool(self.result) - - -class TestSuiteResult(BaseResult): - """ - The test suite specific result. - The _inner_results list stores results of test cases in a given test suite. - Also stores the test suite name. - """ - - suite_name: str - - def __init__(self, suite_name: str): - super(TestSuiteResult, self).__init__() - self.suite_name = suite_name - - def add_test_case(self, test_case_name: str) -> TestCaseResult: - test_case_result = TestCaseResult(test_case_name) - self._inner_results.append(test_case_result) - return test_case_result - - -class BuildTargetResult(BaseResult): - """ - The build target specific result. - The _inner_results list stores results of test suites in a given build target. - Also stores build target specifics, such as compiler used to build DPDK. 
- """ - - arch: Architecture - os: OS - cpu: CPUType - compiler: Compiler - compiler_version: str | None - dpdk_version: str | None - - def __init__(self, build_target: BuildTargetConfiguration): - super(BuildTargetResult, self).__init__() - self.arch = build_target.arch - self.os = build_target.os - self.cpu = build_target.cpu - self.compiler = build_target.compiler - self.compiler_version = None - self.dpdk_version = None - - def add_build_target_info(self, versions: BuildTargetInfo) -> None: - self.compiler_version = versions.compiler_version - self.dpdk_version = versions.dpdk_version - - def add_test_suite(self, test_suite_name: str) -> TestSuiteResult: - test_suite_result = TestSuiteResult(test_suite_name) - self._inner_results.append(test_suite_result) - return test_suite_result - - -class ExecutionResult(BaseResult): - """ - The execution specific result. - The _inner_results list stores results of build targets in a given execution. - Also stores the SUT node configuration. - """ - - sut_node: NodeConfiguration - sut_os_name: str - sut_os_version: str - sut_kernel_version: str - - def __init__(self, sut_node: NodeConfiguration): - super(ExecutionResult, self).__init__() - self.sut_node = sut_node - - def add_build_target(self, build_target: BuildTargetConfiguration) -> BuildTargetResult: - build_target_result = BuildTargetResult(build_target) - self._inner_results.append(build_target_result) - return build_target_result - - def add_sut_info(self, sut_info: NodeInfo): - self.sut_os_name = sut_info.os_name - self.sut_os_version = sut_info.os_version - self.sut_kernel_version = sut_info.kernel_version - - class DTSResult(BaseResult): """ Stores environment information and test results from a DTS run, which are: @@ -269,25 +152,27 @@ class DTSResult(BaseResult): """ dpdk_version: str | None + _child_configs: list[ExecutionConfiguration] _logger: DTSLOG _errors: list[Exception] _return_code: ErrorSeverity - _stats_result: Statistics | None + _stats_result: Union["Statistics", None] _stats_filename: str - def __init__(self, logger: DTSLOG): + def __init__(self, configuration: Configuration, logger: DTSLOG): super(DTSResult, self).__init__() self.dpdk_version = None + self._child_configs = configuration.executions self._logger = logger self._errors = [] self._return_code = ErrorSeverity.NO_ERR self._stats_result = None self._stats_filename = os.path.join(SETTINGS.output_dir, "statistics.txt") - def add_execution(self, sut_node: NodeConfiguration) -> ExecutionResult: - execution_result = ExecutionResult(sut_node) - self._inner_results.append(execution_result) - return execution_result + def add_child_result(self, config: ExecutionConfiguration) -> "ExecutionResult": + result = ExecutionResult(config, self) + self._inner_results.append(result) + return result def add_error(self, error) -> None: self._errors.append(error) @@ -325,3 +210,175 @@ def get_return_code(self) -> int: self._return_code = error_return_code return int(self._return_code) + + +class ExecutionResult(BaseResult): + """ + The execution specific result. + The _inner_results list stores results of build targets in a given execution. + Also stores the SUT node configuration. 
+ """ + + sut_os_name: str + sut_os_version: str + sut_kernel_version: str + _config: ExecutionConfiguration + _parent_result: DTSResult + _child_configs: list[BuildTargetConfiguration] + + def __init__(self, config: ExecutionConfiguration, parent_result: DTSResult): + super(ExecutionResult, self).__init__() + self._config = config + self._parent_result = parent_result + self._child_configs = config.build_targets + + def add_sut_info(self, sut_info: NodeInfo): + self.sut_os_name = sut_info.os_name + self.sut_os_version = sut_info.os_version + self.sut_kernel_version = sut_info.kernel_version + + def add_child_result(self, config: BuildTargetConfiguration) -> "BuildTargetResult": + result = BuildTargetResult(config, self) + self._inner_results.append(result) + return result + + +class BuildTargetResult(BaseResult): + """ + The build target specific result. + The _inner_results list stores results of test suites in a given build target. + Also stores build target specifics, such as compiler used to build DPDK. + """ + + arch: Architecture + os: OS + cpu: CPUType + compiler: Compiler + compiler_version: str | None + dpdk_version: str | None + _config: BuildTargetConfiguration + _parent_result: ExecutionResult + _child_configs: list[TestSuiteConfig] + + def __init__( + self, config: BuildTargetConfiguration, parent_result: ExecutionResult + ): + super(BuildTargetResult, self).__init__() + self.arch = config.arch + self.os = config.os + self.cpu = config.cpu + self.compiler = config.compiler + self.compiler_version = None + self.dpdk_version = None + self._config = config + self._parent_result = parent_result + self._child_configs = parent_result._config.test_suites + + def add_build_target_info(self, versions: BuildTargetInfo) -> None: + self.compiler_version = versions.compiler_version + self.dpdk_version = versions.dpdk_version + + def add_child_result( + self, + config: TestSuiteConfig, + ) -> "TestSuiteResult": + result = TestSuiteResult(config, self) + self._inner_results.append(result) + return result + + +class TestSuiteResult(BaseResult): + """ + The test suite specific result. + The _inner_results list stores results of test cases in a given test suite. + Also stores the test suite name. + """ + + _config: TestSuiteConfig + _parent_result: BuildTargetResult + _child_configs: list[str] + + def __init__(self, config: TestSuiteConfig, parent_result: BuildTargetResult): + super(TestSuiteResult, self).__init__() + self._config = config + self._parent_result = parent_result + self._child_configs = config.test_cases + + def add_child_result(self, config: str) -> "TestCaseResult": + result = TestCaseResult(config, self) + self._inner_results.append(result) + return result + + +class TestCaseResult(BaseResult, FixtureResult): + """ + The test case specific result. + Stores the result of the actual test case. + Also stores the test case name. 
+ """ + + _config: str + _parent_result: TestSuiteResult + + def __init__(self, config: str, parent_result: TestSuiteResult): + super(TestCaseResult, self).__init__() + self._config = config + self._parent_result = parent_result + + def block(self): + self.update(Result.BLOCK) + + def update(self, result: Result, error: Exception | None = None) -> None: + self.result = result + self.error = error + + def _get_inner_errors(self) -> list[Exception]: + if self.error: + return [self.error] + return [] + + def add_stats(self, statistics: "Statistics") -> None: + statistics += self.result + + def __bool__(self) -> bool: + return ( + bool(self.setup_result) and bool(self.teardown_result) and bool(self.result) + ) + + +class Statistics(dict): + """ + A helper class used to store the number of test cases by its result + along a few other basic information. + Using a dict provides a convenient way to format the data. + """ + + def __init__(self, dpdk_version: str | None): + super(Statistics, self).__init__() + for result in Result: + self[result.name] = 0 + self["PASS RATE"] = 0.0 + self["DPDK VERSION"] = dpdk_version + + def __iadd__(self, other: Result) -> "Statistics": + """ + Add a Result to the final count. + """ + self[other.name] += 1 + self["PASS RATE"] = ( + float(self[Result.PASS.name]) + * 100 + / sum(self[result.name] for result in Result) + ) + return self + + def __str__(self) -> str: + """ + Provide a string representation of the data. + """ + stats_str = "" + for key, value in self.items(): + stats_str += f"{key:<12} = {value}\n" + # according to docs, we should use \n when writing to text files + # on all platforms + return stats_str From patchwork Wed Dec 20 10:33:31 2023 Content-Type: text/plain; charset="utf-8" MIME-Version: 1.0 Content-Transfer-Encoding: 8bit X-Patchwork-Submitter: =?utf-8?q?Juraj_Linke=C5=A1?= X-Patchwork-Id: 135392 X-Patchwork-Delegate: thomas@monjalon.net Return-Path: X-Original-To: patchwork@inbox.dpdk.org Delivered-To: patchwork@inbox.dpdk.org Received: from mails.dpdk.org (mails.dpdk.org [217.70.189.124]) by inbox.dpdk.org (Postfix) with ESMTP id 39FE243723; Wed, 20 Dec 2023 11:34:19 +0100 (CET) Received: from mails.dpdk.org (localhost [127.0.0.1]) by mails.dpdk.org (Postfix) with ESMTP id 281ED42EBA; Wed, 20 Dec 2023 11:33:47 +0100 (CET) Received: from mail-lj1-f178.google.com (mail-lj1-f178.google.com [209.85.208.178]) by mails.dpdk.org (Postfix) with ESMTP id E14E442EB3 for ; Wed, 20 Dec 2023 11:33:45 +0100 (CET) Received: by mail-lj1-f178.google.com with SMTP id 38308e7fff4ca-2cc61d4e5aeso48633591fa.0 for ; Wed, 20 Dec 2023 02:33:45 -0800 (PST) DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=pantheon.tech; s=google; t=1703068425; x=1703673225; darn=dpdk.org; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:from:to:cc:subject:date :message-id:reply-to; bh=9dlYuGyfG0mULoa76SHQ1yVBwxqmADwYFieC9qyU4SU=; b=hQO6pHzJ+Mgx1jwgQhaOfEBo/g/KdK1HkwaIz4yqaeC36N5zB87zjf9CoRdj/n4HuC u8d85raBhNtbC0uPD/vMLSHDeOYlmDLFrOknOVF8UgZV2yz4HaieU//hnLuRMan/3ZSr mu2UaaYcWsJTddGUe2Sx4W12JacPi8C9wzspg28DSQi3aUxDKg0Z7LKDLMfnjjBJmeIK 60ZFnjQjvKsuL3QbiUxlDT32srRjAhHfk8zKIFz8cNdV6I4NNMBBldzJIgCWxfYJr0OS zdLuzzlLrVBGVyhLHnPDBisMisEEvVz5G4KxaeTnwBAVQG5lHeJLMPVRKWiPg483a2YV tpuQ== X-Google-DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=1e100.net; s=20230601; t=1703068425; x=1703673225; h=content-transfer-encoding:mime-version:references:in-reply-to :message-id:date:subject:cc:to:from:x-gm-message-state:from:to:cc 
:subject:date:message-id:reply-to; bh=9dlYuGyfG0mULoa76SHQ1yVBwxqmADwYFieC9qyU4SU=; b=ejlFqoEuTcnIeP48zsqGIqHTGG/HlScACUYjcCU/pkhgLoA7BYzugjm/cu/4gKc/g3 eIbizbCB/uPF4DWVDsnfmGa0LySl8tLBbxwxN08e4dFqMV73yR3RrMw6zm+GSQ3umrCl kjgOFbvWTt/oVKnMNkKAIdZq/ZvdZk46YQQGoz9y8qLXOeeIDPpy1ew+4TT+TGZR88w2 cJ/Shv5LY8zlbW1NQ0DuJpKjEJt51ilt8WuCoFaYvosFyypL9oeVHESPYb4Qk0NRT8wf jTkYF89YRhJvjg0M4ylRIlQD/SVu7CXH45xxqBaNlzpQLJ9rXT/lgHqowcESCtpZkKYs Uhiw== X-Gm-Message-State: AOJu0YwMfi3JdggvzJwhfiHXCQgF6sF1QCijXZQ+AG275I4TyHo3zGW/ mvtGJJ+xLqrV1bIaBfTN3B4IQA== X-Google-Smtp-Source: AGHT+IENLswAlPpLfkHo75mROczPuUg55n/kuCqhcwyi+fb7VpFT+MZ7r81fvkroQtNlyp44oZ1caA== X-Received: by 2002:a05:6512:a8b:b0:50e:d55:3927 with SMTP id m11-20020a0565120a8b00b0050e0d553927mr7992990lfu.66.1703068425306; Wed, 20 Dec 2023 02:33:45 -0800 (PST) Received: from jlinkes-PT-Latitude-5530.pantheon.local (81.89.53.154.host.vnet.sk. [81.89.53.154]) by smtp.gmail.com with ESMTPSA id bd18-20020a056402207200b00542db304680sm12588981edb.63.2023.12.20.02.33.43 (version=TLS1_3 cipher=TLS_AES_256_GCM_SHA384 bits=256/256); Wed, 20 Dec 2023 02:33:44 -0800 (PST)
From: =?utf-8?q?Juraj_Linke=C5=A1?= To: thomas@monjalon.net, Honnappa.Nagarahalli@arm.com, jspewock@iol.unh.edu, probb@iol.unh.edu, paul.szczepanek@arm.com, yoan.picchi@foss.arm.com, Luca.Vizzarro@arm.com Cc: dev@dpdk.org, =?utf-8?q?Juraj_Linke=C5=A1?= Subject: [RFC PATCH v1 5/5] dts: refactor logging configuration Date: Wed, 20 Dec 2023 11:33:31 +0100 Message-Id: <20231220103331.60888-6-juraj.linkes@pantheon.tech> X-Mailer: git-send-email 2.34.1 In-Reply-To: <20231220103331.60888-1-juraj.linkes@pantheon.tech> References: <20231220103331.60888-1-juraj.linkes@pantheon.tech> MIME-Version: 1.0 X-BeenThere: dev@dpdk.org X-Mailman-Version: 2.1.29 Precedence: list List-Id: DPDK patches and discussions List-Unsubscribe: , List-Archive: List-Post: List-Help: List-Subscribe: , Errors-To: dev-bounces@dpdk.org
Refactor logging for improved configuration and flexibility: remove the unclear logger arguments and log each test suite into its own file. In addition, the other framework modules no longer depend on the logger module, so they can be used by other consumers.
Signed-off-by: Juraj Linkeš --- dts/framework/logger.py | 162 ++++++++---------- dts/framework/remote_session/__init__.py | 4 +- dts/framework/remote_session/os_session.py | 6 +- .../remote_session/remote/__init__.py | 7 +- .../remote/interactive_remote_session.py | 7 +- .../remote/interactive_shell.py | 7 +- .../remote_session/remote/remote_session.py | 8 +- .../remote_session/remote/ssh_session.py | 5 +- dts/framework/runner.py | 13 +- dts/framework/test_result.py | 6 +- dts/framework/test_suite.py | 6 +- dts/framework/testbed_model/node.py | 10 +- dts/framework/testbed_model/scapy.py | 6 +- .../testbed_model/traffic_generator.py | 5 +- dts/main.py | 6 +- 15 files changed, 124 insertions(+), 134 deletions(-) diff --git a/dts/framework/logger.py b/dts/framework/logger.py index bb2991e994..43c49c2d03 100644 --- a/dts/framework/logger.py +++ b/dts/framework/logger.py @@ -10,108 +10,98 @@ import logging import os.path -from typing import TypedDict - -from .settings import SETTINGS +from enum import Enum +from logging import FileHandler, StreamHandler +from pathlib import Path date_fmt = "%Y/%m/%d %H:%M:%S" -stream_fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s" +stream_fmt = "%(asctime)s - %(stage)s - %(name)s - %(levelname)s - %(message)s" -class LoggerDictType(TypedDict): - logger: "DTSLOG" - name: str - node: str +def init_logger(verbose: bool, output_dir: str): + logging.raiseExceptions = False + DTSLog._output_dir = output_dir + logging.setLoggerClass(DTSLog) -# List for saving all using loggers -Loggers: list[LoggerDictType] = [] + root_logger = logging.getLogger() + root_logger.setLevel(1) + sh = StreamHandler() + sh.setFormatter(logging.Formatter(stream_fmt, date_fmt)) + sh.setLevel(logging.INFO) + if verbose: + sh.setLevel(logging.DEBUG) + root_logger.addHandler(sh) -class DTSLOG(logging.LoggerAdapter): - """ - DTS log class for framework and testsuite. 
- """ + if not os.path.exists(output_dir): + os.mkdir(output_dir) - _logger: logging.Logger - node: str - sh: logging.StreamHandler - fh: logging.FileHandler - verbose_fh: logging.FileHandler + add_file_handlers(Path(output_dir, "dts")) - def __init__(self, logger: logging.Logger, node: str = "suite"): - self._logger = logger - # 1 means log everything, this will be used by file handlers if their level - # is not set - self._logger.setLevel(1) - self.node = node +def add_file_handlers(log_file_path: Path) -> list[FileHandler]: + root_logger = logging.getLogger() - # add handler to emit to stdout - sh = logging.StreamHandler() - sh.setFormatter(logging.Formatter(stream_fmt, date_fmt)) - sh.setLevel(logging.INFO) # console handler default level + fh = FileHandler(f"{log_file_path}.log") + fh.setFormatter(logging.Formatter(stream_fmt, date_fmt)) + root_logger.addHandler(fh) - if SETTINGS.verbose is True: - sh.setLevel(logging.DEBUG) + verbose_fh = FileHandler(f"{log_file_path}.verbose.log") + verbose_fh.setFormatter( + logging.Formatter( + "%(asctime)s|%(stage)s|%(name)s|%(levelname)s|%(pathname)s|%(lineno)d|" + "%(funcName)s|%(process)d|%(thread)d|%(threadName)s|%(message)s", + datefmt=date_fmt, + ) + ) + root_logger.addHandler(verbose_fh) - self._logger.addHandler(sh) - self.sh = sh + return [fh, verbose_fh] - # prepare the output folder - if not os.path.exists(SETTINGS.output_dir): - os.mkdir(SETTINGS.output_dir) - logging_path_prefix = os.path.join(SETTINGS.output_dir, node) +class DtsStage(Enum): + pre_execution = "pre-execution" + execution = "execution" + build_target = "build-target" + suite = "suite" + post_execution = "post-execution" - fh = logging.FileHandler(f"{logging_path_prefix}.log") - fh.setFormatter( - logging.Formatter( - fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s", - datefmt=date_fmt, - ) - ) + def __str__(self) -> str: + return self.value - self._logger.addHandler(fh) - self.fh = fh - - # This outputs EVERYTHING, intended for post-mortem debugging - # Also optimized for processing via AWK (awk -F '|' ...) - verbose_fh = logging.FileHandler(f"{logging_path_prefix}.verbose.log") - verbose_fh.setFormatter( - logging.Formatter( - fmt="%(asctime)s|%(name)s|%(levelname)s|%(pathname)s|%(lineno)d|" - "%(funcName)s|%(process)d|%(thread)d|%(threadName)s|%(message)s", - datefmt=date_fmt, - ) - ) - self._logger.addHandler(verbose_fh) - self.verbose_fh = verbose_fh - - super(DTSLOG, self).__init__(self._logger, dict(node=self.node)) - - def logger_exit(self) -> None: - """ - Remove stream handler and logfile handler. - """ - for handler in (self.sh, self.fh, self.verbose_fh): - handler.flush() - self._logger.removeHandler(handler) - - -def getLogger(name: str, node: str = "suite") -> DTSLOG: - """ - Get logger handler and if there's no handler for specified Node will create one. 
- """ - global Loggers - # return saved logger - logger: LoggerDictType - for logger in Loggers: - if logger["name"] == name and logger["node"] == node: - return logger["logger"] - - # return new logger - dts_logger: DTSLOG = DTSLOG(logging.getLogger(name), node) - Loggers.append({"logger": dts_logger, "name": name, "node": node}) - return dts_logger +class DTSLog(logging.Logger): + _stage: DtsStage = DtsStage.pre_execution + _extra_file_handlers: list[FileHandler] = [] + _output_dir: None | str = None + + def makeRecord(self, *args, **kwargs): + record = super().makeRecord(*args, **kwargs) + record.stage = DTSLog._stage + return record + + def set_stage(self, stage: DtsStage, log_file_name: str | None = None): + self._remove_extra_file_handlers() + + if DTSLog._stage != stage: + self.info(f"Moving from stage '{DTSLog._stage}' to stage '{stage}'.") + DTSLog._stage = stage + + if log_file_name: + if DTSLog._output_dir: + DTSLog._extra_file_handlers.extend( + add_file_handlers(Path(DTSLog._output_dir, log_file_name)) + ) + else: + self.warning( + f"Cannot log '{DTSLog._stage}' stage in separate file, " + "output dir is not defined." + ) + + def _remove_extra_file_handlers(self) -> None: + if DTSLog._extra_file_handlers: + for extra_file_handler in DTSLog._extra_file_handlers: + self.root.removeHandler(extra_file_handler) + + DTSLog._extra_file_handlers = [] diff --git a/dts/framework/remote_session/__init__.py b/dts/framework/remote_session/__init__.py index 6124417bd7..a4ec2f40ae 100644 --- a/dts/framework/remote_session/__init__.py +++ b/dts/framework/remote_session/__init__.py @@ -11,10 +11,10 @@ """ # pylama:ignore=W0611 +import logging from framework.config import OS, NodeConfiguration from framework.exception import ConfigurationError -from framework.logger import DTSLOG from .linux_session import LinuxSession from .os_session import InteractiveShellType, OSSession @@ -30,7 +30,7 @@ ) -def create_session(node_config: NodeConfiguration, name: str, logger: DTSLOG) -> OSSession: +def create_session(node_config: NodeConfiguration, name: str, logger: logging.Logger) -> OSSession: match node_config.os: case OS.linux: return LinuxSession(node_config, name, logger) diff --git a/dts/framework/remote_session/os_session.py b/dts/framework/remote_session/os_session.py index 8a709eac1c..2524a4e669 100644 --- a/dts/framework/remote_session/os_session.py +++ b/dts/framework/remote_session/os_session.py @@ -2,6 +2,7 @@ # Copyright(c) 2023 PANTHEON.tech s.r.o. 
# Copyright(c) 2023 University of New Hampshire +import logging from abc import ABC, abstractmethod from collections.abc import Iterable from ipaddress import IPv4Interface, IPv6Interface @@ -9,7 +10,6 @@ from typing import Type, TypeVar, Union from framework.config import Architecture, NodeConfiguration, NodeInfo -from framework.logger import DTSLOG from framework.remote_session.remote import InteractiveShell from framework.settings import SETTINGS from framework.testbed_model import LogicalCore @@ -36,7 +36,7 @@ class OSSession(ABC): _config: NodeConfiguration name: str - _logger: DTSLOG + _logger: logging.Logger remote_session: RemoteSession interactive_session: InteractiveRemoteSession @@ -44,7 +44,7 @@ def __init__( self, node_config: NodeConfiguration, name: str, - logger: DTSLOG, + logger: logging.Logger, ): self._config = node_config self.name = name diff --git a/dts/framework/remote_session/remote/__init__.py b/dts/framework/remote_session/remote/__init__.py index 06403691a5..4a22155153 100644 --- a/dts/framework/remote_session/remote/__init__.py +++ b/dts/framework/remote_session/remote/__init__.py @@ -4,8 +4,9 @@ # pylama:ignore=W0611 +import logging + from framework.config import NodeConfiguration -from framework.logger import DTSLOG from .interactive_remote_session import InteractiveRemoteSession from .interactive_shell import InteractiveShell @@ -16,12 +17,12 @@ def create_remote_session( - node_config: NodeConfiguration, name: str, logger: DTSLOG + node_config: NodeConfiguration, name: str, logger: logging.Logger ) -> RemoteSession: return SSHSession(node_config, name, logger) def create_interactive_session( - node_config: NodeConfiguration, logger: DTSLOG + node_config: NodeConfiguration, logger: logging.Logger ) -> InteractiveRemoteSession: return InteractiveRemoteSession(node_config, logger) diff --git a/dts/framework/remote_session/remote/interactive_remote_session.py b/dts/framework/remote_session/remote/interactive_remote_session.py index 098ded1bb0..bf0996a747 100644 --- a/dts/framework/remote_session/remote/interactive_remote_session.py +++ b/dts/framework/remote_session/remote/interactive_remote_session.py @@ -2,7 +2,7 @@ # Copyright(c) 2023 University of New Hampshire """Handler for an SSH session dedicated to interactive shells.""" - +import logging import socket import traceback @@ -16,7 +16,6 @@ from framework.config import NodeConfiguration from framework.exception import SSHConnectionError -from framework.logger import DTSLOG class InteractiveRemoteSession: @@ -54,11 +53,11 @@ class InteractiveRemoteSession: username: str password: str session: SSHClient - _logger: DTSLOG + _logger: logging.Logger _node_config: NodeConfiguration _transport: Transport | None - def __init__(self, node_config: NodeConfiguration, _logger: DTSLOG) -> None: + def __init__(self, node_config: NodeConfiguration, _logger: logging.Logger) -> None: self._node_config = node_config self._logger = _logger self.hostname = node_config.hostname diff --git a/dts/framework/remote_session/remote/interactive_shell.py b/dts/framework/remote_session/remote/interactive_shell.py index 4db19fb9b3..b6074838c2 100644 --- a/dts/framework/remote_session/remote/interactive_shell.py +++ b/dts/framework/remote_session/remote/interactive_shell.py @@ -11,14 +11,13 @@ elevated privileges to start it is expected that the method for gaining those privileges is provided when initializing the class. 
""" - +import logging from abc import ABC from pathlib import PurePath from typing import Callable from paramiko import Channel, SSHClient, channel # type: ignore[import] -from framework.logger import DTSLOG from framework.settings import SETTINGS @@ -58,7 +57,7 @@ class InteractiveShell(ABC): _stdin: channel.ChannelStdinFile _stdout: channel.ChannelFile _ssh_channel: Channel - _logger: DTSLOG + _logger: logging.Logger _timeout: float _app_args: str _default_prompt: str = "" @@ -69,7 +68,7 @@ class InteractiveShell(ABC): def __init__( self, interactive_session: SSHClient, - logger: DTSLOG, + logger: logging.Logger, get_privileged_command: Callable[[str], str] | None, app_args: str = "", timeout: float = SETTINGS.timeout, diff --git a/dts/framework/remote_session/remote/remote_session.py b/dts/framework/remote_session/remote/remote_session.py index 719f7d1ef7..da78e5c921 100644 --- a/dts/framework/remote_session/remote/remote_session.py +++ b/dts/framework/remote_session/remote/remote_session.py @@ -2,14 +2,13 @@ # Copyright(c) 2010-2014 Intel Corporation # Copyright(c) 2022-2023 PANTHEON.tech s.r.o. # Copyright(c) 2022-2023 University of New Hampshire - import dataclasses +import logging from abc import ABC, abstractmethod from pathlib import PurePath from framework.config import NodeConfiguration from framework.exception import RemoteCommandExecutionError -from framework.logger import DTSLOG from framework.settings import SETTINGS @@ -50,14 +49,14 @@ class RemoteSession(ABC): username: str password: str history: list[CommandResult] - _logger: DTSLOG + _logger: logging.Logger _node_config: NodeConfiguration def __init__( self, node_config: NodeConfiguration, session_name: str, - logger: DTSLOG, + logger: logging.Logger, ): self._node_config = node_config @@ -120,7 +119,6 @@ def close(self, force: bool = False) -> None: """ Close the remote session and free all used resources. """ - self._logger.logger_exit() self._close(force) @abstractmethod diff --git a/dts/framework/remote_session/remote/ssh_session.py b/dts/framework/remote_session/remote/ssh_session.py index 1a7ee649ab..42441c4587 100644 --- a/dts/framework/remote_session/remote/ssh_session.py +++ b/dts/framework/remote_session/remote/ssh_session.py @@ -1,6 +1,6 @@ # SPDX-License-Identifier: BSD-3-Clause # Copyright(c) 2023 PANTHEON.tech s.r.o. 
- +import logging import socket import traceback from pathlib import PurePath @@ -20,7 +20,6 @@ from framework.config import NodeConfiguration from framework.exception import SSHConnectionError, SSHSessionDeadError, SSHTimeoutError -from framework.logger import DTSLOG from .remote_session import CommandResult, RemoteSession @@ -49,7 +48,7 @@ def __init__( self, node_config: NodeConfiguration, session_name: str, - logger: DTSLOG, + logger: logging.Logger, ): super(SSHSession, self).__init__(node_config, session_name, logger) diff --git a/dts/framework/runner.py b/dts/framework/runner.py index 28570d4a1c..5c06e4ca1a 100644 --- a/dts/framework/runner.py +++ b/dts/framework/runner.py @@ -11,6 +11,7 @@ from copy import deepcopy from dataclasses import dataclass from types import MethodType, ModuleType +from typing import cast from .config import ( BuildTargetConfiguration, @@ -24,7 +25,7 @@ SSHTimeoutError, TestCaseVerifyError, ) -from .logger import DTSLOG, getLogger +from .logger import DTSLog, DtsStage from .settings import SETTINGS from .test_result import ( BuildTargetResult, @@ -68,12 +69,12 @@ def processed_config(self) -> ExecutionConfiguration: class DTSRunner: - _logger: DTSLOG + _logger: DTSLog _result: DTSResult _executions: list[Execution] def __init__(self, configuration: Configuration): - self._logger = getLogger("DTSRunner") + self._logger = cast(DTSLog, logging.getLogger("DTSRunner")) self._result = DTSResult(configuration, self._logger) self._executions = create_executions(configuration.executions) @@ -146,6 +147,7 @@ def _run_execution( Run the given execution. This involves running the execution setup as well as running all build targets in the given execution. """ + self._logger.set_stage(DtsStage.execution) self._logger.info( "Running execution with SUT " f"'{execution.config.system_under_test_node.name}'." @@ -175,6 +177,7 @@ def _run_execution( sut_node.tear_down_execution() execution_result.update_teardown(Result.PASS) except Exception as e: + self._logger.set_stage(DtsStage.execution) self._logger.exception("Execution teardown failed.") execution_result.update_teardown(Result.FAIL, e) @@ -189,6 +192,7 @@ def _run_build_target( """ Run the given build target. 
""" + self._logger.set_stage(DtsStage.build_target) self._logger.info(f"Running build target '{build_target.name}'.") build_target_result = execution_result.add_child_result(build_target) @@ -209,6 +213,7 @@ def _run_build_target( sut_node.tear_down_build_target() build_target_result.update_teardown(Result.PASS) except Exception as e: + self._logger.set_stage(DtsStage.build_target) self._logger.exception("Build target teardown failed.") build_target_result.update_teardown(Result.FAIL, e) @@ -265,6 +270,7 @@ def _run_test_suite( """ test_suite = test_suite_setup.test_suite(sut_node, tg_node) test_suite_name = test_suite_setup.test_suite.__name__ + self._logger.set_stage(DtsStage.suite, test_suite_name) test_suite_result = build_target_result.add_child_result( test_suite_setup.processed_config() ) @@ -397,6 +403,7 @@ def _exit_dts(self) -> None: self._result.process() if self._logger: + self._logger.set_stage(DtsStage.post_execution) self._logger.info("DTS execution has ended.") logging.shutdown() diff --git a/dts/framework/test_result.py b/dts/framework/test_result.py index dba2c55d36..221e75205e 100644 --- a/dts/framework/test_result.py +++ b/dts/framework/test_result.py @@ -6,6 +6,7 @@ Generic result container and reporters """ +import logging import os.path from collections.abc import MutableSequence from enum import Enum, auto @@ -24,7 +25,6 @@ TestSuiteConfig, ) from .exception import DTSError, ErrorSeverity -from .logger import DTSLOG from .settings import SETTINGS @@ -153,13 +153,13 @@ class DTSResult(BaseResult): dpdk_version: str | None _child_configs: list[ExecutionConfiguration] - _logger: DTSLOG + _logger: logging.Logger _errors: list[Exception] _return_code: ErrorSeverity _stats_result: Union["Statistics", None] _stats_filename: str - def __init__(self, configuration: Configuration, logger: DTSLOG): + def __init__(self, configuration: Configuration, logger: logging.Logger): super(DTSResult, self).__init__() self.dpdk_version = None self._child_configs = configuration.executions diff --git a/dts/framework/test_suite.py b/dts/framework/test_suite.py index e73206993d..9c9a8c1e08 100644 --- a/dts/framework/test_suite.py +++ b/dts/framework/test_suite.py @@ -6,6 +6,7 @@ Base class for creating DTS test cases. """ +import logging from ipaddress import IPv4Interface, IPv6Interface, ip_interface from typing import Union @@ -14,7 +15,6 @@ from scapy.packet import Packet, Padding # type: ignore[import] from .exception import TestCaseVerifyError -from .logger import DTSLOG, getLogger from .testbed_model import SutNode, TGNode from .testbed_model.hw.port import Port, PortLink from .utils import get_packet_summaries @@ -41,7 +41,7 @@ class TestSuite(object): sut_node: SutNode tg_node: TGNode is_blocking = False - _logger: DTSLOG + _logger: logging.Logger _port_links: list[PortLink] _sut_port_ingress: Port _sut_port_egress: Port @@ -59,7 +59,7 @@ def __init__( ): self.sut_node = sut_node self.tg_node = tg_node - self._logger = getLogger(self.__class__.__name__) + self._logger = logging.getLogger(self.__class__.__name__) self._port_links = [] self._process_links() self._sut_port_ingress, self._tg_port_egress = ( diff --git a/dts/framework/testbed_model/node.py b/dts/framework/testbed_model/node.py index ef700d8114..a98c58df4f 100644 --- a/dts/framework/testbed_model/node.py +++ b/dts/framework/testbed_model/node.py @@ -6,7 +6,7 @@ """ A node is a generic host that DTS connects to and manages. 
""" - +import logging from abc import ABC from ipaddress import IPv4Interface, IPv6Interface from typing import Any, Callable, Type, Union @@ -16,7 +16,6 @@ ExecutionConfiguration, NodeConfiguration, ) -from framework.logger import DTSLOG, getLogger from framework.remote_session import InteractiveShellType, OSSession, create_session from framework.settings import SETTINGS @@ -43,7 +42,7 @@ class Node(ABC): name: str lcores: list[LogicalCore] ports: list[Port] - _logger: DTSLOG + _logger: logging.Logger _other_sessions: list[OSSession] _execution_config: ExecutionConfiguration virtual_devices: list[VirtualDevice] @@ -51,7 +50,7 @@ class Node(ABC): def __init__(self, node_config: NodeConfiguration): self.config = node_config self.name = node_config.name - self._logger = getLogger(self.name) + self._logger = logging.getLogger(self.name) self.main_session = create_session(self.config, self.name, self._logger) self._logger.info(f"Connected to node: {self.name}") @@ -137,7 +136,7 @@ def create_session(self, name: str) -> OSSession: connection = create_session( self.config, session_name, - getLogger(session_name, node=self.name), + logging.getLogger(session_name), ) self._other_sessions.append(connection) return connection @@ -237,7 +236,6 @@ def close(self) -> None: self.main_session.close() for session in self._other_sessions: session.close() - self._logger.logger_exit() @staticmethod def skip_setup(func: Callable[..., Any]) -> Callable[..., Any]: diff --git a/dts/framework/testbed_model/scapy.py b/dts/framework/testbed_model/scapy.py index 9083e92b3d..61058cd38a 100644 --- a/dts/framework/testbed_model/scapy.py +++ b/dts/framework/testbed_model/scapy.py @@ -13,6 +13,7 @@ """ import inspect +import logging import marshal import time import types @@ -24,7 +25,6 @@ from scapy.packet import Packet # type: ignore[import] from framework.config import OS, ScapyTrafficGeneratorConfig -from framework.logger import DTSLOG, getLogger from framework.remote_session import PythonShell from framework.settings import SETTINGS @@ -190,12 +190,12 @@ class ScapyTrafficGenerator(CapturingTrafficGenerator): rpc_server_proxy: xmlrpc.client.ServerProxy _config: ScapyTrafficGeneratorConfig _tg_node: TGNode - _logger: DTSLOG + _logger: logging.Logger def __init__(self, tg_node: TGNode, config: ScapyTrafficGeneratorConfig): self._config = config self._tg_node = tg_node - self._logger = getLogger(f"{self._tg_node.name} {self._config.traffic_generator_type}") + self._logger = logging.getLogger(f"{self._tg_node.name} {self._config.traffic_generator_type}") assert ( self._tg_node.config.os == OS.linux diff --git a/dts/framework/testbed_model/traffic_generator.py b/dts/framework/testbed_model/traffic_generator.py index 28c35d3ce4..6b0838958a 100644 --- a/dts/framework/testbed_model/traffic_generator.py +++ b/dts/framework/testbed_model/traffic_generator.py @@ -7,12 +7,11 @@ These traffic generators can't capture received traffic, only count the number of received packets. """ - +import logging from abc import ABC, abstractmethod from scapy.packet import Packet # type: ignore[import] -from framework.logger import DTSLOG from framework.utils import get_packet_summaries from .hw.port import Port @@ -24,7 +23,7 @@ class TrafficGenerator(ABC): Defines the few basic methods that each traffic generator must implement. """ - _logger: DTSLOG + _logger: logging.Logger def send_packet(self, packet: Packet, port: Port) -> None: """Send a packet and block until it is fully sent. 
diff --git a/dts/main.py b/dts/main.py index 879ce5cb89..f2828148f0 100755 --- a/dts/main.py +++ b/dts/main.py @@ -8,18 +8,18 @@ A test framework for testing DPDK. """ -import logging - from framework.config import load_config +from framework.logger import init_logger from framework.runner import DTSRunner +from framework.settings import SETTINGS def main() -> None: + init_logger(SETTINGS.verbose, SETTINGS.output_dir) dts = DTSRunner(configuration=load_config()) dts.run() # Main program begins here if __name__ == "__main__": - logging.raiseExceptions = True main()
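For reference, here is a minimal sketch of how the refactored logging introduced in this last patch is meant to be used. It only relies on names that appear in the diff above (init_logger, DTSLog, DtsStage, set_stage); the "output" directory and the "TestSmokeTests" suite name are illustrative placeholders, and the snippet itself is not part of the patch.

    import logging
    from typing import cast

    from framework.logger import DTSLog, DtsStage, init_logger

    # Configure the root logger once at startup. init_logger() installs DTSLog
    # as the logger class, so every logger created afterwards records a stage.
    init_logger(verbose=True, output_dir="output")

    # Consumers only need the standard logging module; the cast merely tells
    # the type checker that set_stage() is available on this logger.
    logger = cast(DTSLog, logging.getLogger("DTSRunner"))

    # The current stage is attached to every log record and printed through
    # the %(stage)s field of the stream and file formatters.
    logger.set_stage(DtsStage.execution)
    logger.info("Running execution ...")

    # Passing a file name attaches extra per-suite file handlers, so these
    # messages also land in output/TestSmokeTests.log and .verbose.log.
    logger.set_stage(DtsStage.suite, "TestSmokeTests")
    logger.info("Starting test suite setup ...")

    # Moving to the next stage removes the per-suite handlers again.
    logger.set_stage(DtsStage.post_execution)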