diff --git a/Tests/kaas/scs-sonobuoy-config.yaml b/Tests/kaas/scs-sonobuoy-config.yaml new file mode 100644 index 000000000..b42b3d516 --- /dev/null +++ b/Tests/kaas/scs-sonobuoy-config.yaml @@ -0,0 +1,26 @@ +okToFail: +# InternalIP still used in tests. Having only an ExternalIP is considered valid by SCS: +- regex: HostPort validates that there is no conflict between pods with same hostPort but different hostIP and protocol + reason: "Fails when a cluster has no InternalIP (only ExternalIP): https://github.com/kubernetes/kubernetes/issues/136626" +- regex: Services should be able to switch session affinity for NodePort service + reason: "Fails when a cluster has no InternalIP (only ExternalIP): https://github.com/kubernetes/kubernetes/issues/136626" +- regex: Services should have session affinity work for NodePort service + reason: "Fails when a cluster has no InternalIP (only ExternalIP): https://github.com/kubernetes/kubernetes/issues/136626" +- regex: validates that there exists conflict between pods with same hostPort and protocol but one using 0.0.0.0 hostIP + reason: "Fails when a cluster has no InternalIP (only ExternalIP): https://github.com/kubernetes/kubernetes/issues/136626" + +# Was flaky +- regex: Netpol NetworkPolicy between server and client should allow ingress access from updated namespace + reason: Flaky test. Fix in v1.36 (https://github.com/kubernetes/kubernetes/pull/136715) + +# SCTP is optional +- regex: Feature:SCTPConnectivity + reason: SCTPConnectivity is optional. Currently Cilium does not support it by default. 
+ + # Tests skipped by Cilium: https://github.com/cilium/cilium/blob/main/.github/workflows/k8s-kind-network-policies-e2e.yaml#L177-L185 +- regex: should.allow.egress.access.to.server.in.CIDR.block + reason: https://github.com/cilium/cilium/issues/9209 +- regex: should.ensure.an.IP.overlapping.both.IPBlock.CIDR.and.IPBlock.Except.is.allowed + reason: https://github.com/cilium/cilium/issues/9209 +- regex: should.enforce.except.clause.while.egress.access.to.server.in.CIDR.block + reason: https://github.com/cilium/cilium/issues/9209 diff --git a/Tests/kaas/sonobuoy_handler/run_sonobuoy.py b/Tests/kaas/sonobuoy_handler/run_sonobuoy.py index 50ef4249c..39602e530 100755 --- a/Tests/kaas/sonobuoy_handler/run_sonobuoy.py +++ b/Tests/kaas/sonobuoy_handler/run_sonobuoy.py @@ -6,21 +6,35 @@ import click -from sonobuoy_handler import SonobuoyHandler +from sonobuoy_handler import SonobuoyHandler, check_sonobuoy_result logger = logging.getLogger(__name__) -@click.command() +@click.group() +def cli(): + pass + + +@cli.command("run") @click.option("-k", "--kubeconfig", "kubeconfig", required=True, type=click.Path(exists=True), help="path/to/kubeconfig_file.yaml",) @click.option("-r", "--result_dir_name", "result_dir_name", type=str, default="sonobuoy_results", help="directory name to store results at",) @click.option("-c", "--check", "check_name", type=str, default="sonobuoy_executor", help="this MUST be the same name as the id in 'scs-compatible-kaas.yaml'",) +@click.option("--scs-sonobuoy-config", "scs_sonobuoy_config_yaml", type=click.Path(exists=True), default="kaas/scs-sonobuoy-config.yaml", help="Configuration for Sonobuoy (yaml format)") @click.option("-a", "--arg", "args", multiple=True) -def sonobuoy_run(kubeconfig, result_dir_name, check_name, args): - sonobuoy_handler = SonobuoyHandler(check_name, kubeconfig, result_dir_name, args) +def sonobuoy_run(kubeconfig, result_dir_name, check_name, scs_sonobuoy_config_yaml, args): + sonobuoy_handler = SonobuoyHandler(check_name, 
kubeconfig, result_dir_name, scs_sonobuoy_config_yaml, args) sys.exit(sonobuoy_handler.run()) +@cli.command("check-results") +@click.option("--scs-sonobuoy-config", "scs_sonobuoy_config_yaml", type=click.Path(exists=True), default="kaas/scs-sonobuoy-config.yaml", help="Configuration for Sonobuoy (yaml format)") +@click.argument("sonobuoy_result_yaml", type=click.Path(exists=True)) +def check_results(scs_sonobuoy_config_yaml, sonobuoy_result_yaml): + check_sonobuoy_result(scs_sonobuoy_config_yaml, sonobuoy_result_yaml) + sys.exit(0) + + if __name__ == "__main__": logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.DEBUG) - sonobuoy_run() + cli() diff --git a/Tests/kaas/sonobuoy_handler/sonobuoy_handler.py b/Tests/kaas/sonobuoy_handler/sonobuoy_handler.py index 638fc4652..e416d5be8 100644 --- a/Tests/kaas/sonobuoy_handler/sonobuoy_handler.py +++ b/Tests/kaas/sonobuoy_handler/sonobuoy_handler.py @@ -1,6 +1,6 @@ from collections import Counter -import json import logging +import re import os import os.path import shlex @@ -25,7 +25,7 @@ def _find_sonobuoy(): def _fmt_result(counter): - return ', '.join(f"{counter.get(key, 0)} {key}" for key in ('passed', 'failed', 'skipped')) + return ', '.join(f"{counter.get(key, 0)} {key}" for key in ('passed', 'failed', 'failed_ok', 'skipped')) class SonobuoyHandler: @@ -42,6 +42,7 @@ def __init__( check_name="sonobuoy_handler", kubeconfig=None, result_dir_name="sonobuoy_results", + scs_sonobuoy_config_yaml="kaas/scs-sonobuoy-config.yaml", args=(), ): self.check_name = check_name @@ -55,6 +56,9 @@ def __init__( logger.debug(f"working from {self.working_directory}") logger.debug(f"placing results at {self.result_dir_name}") logger.debug(f"sonobuoy executable at {self.sonobuoy}") + if not os.path.exists(scs_sonobuoy_config_yaml): + raise RuntimeError(f"scs_sonobuoy_config_yaml {scs_sonobuoy_config_yaml} does not exist.") + self.scs_sonobuoy_config_yaml = scs_sonobuoy_config_yaml self.args = (arg0 for arg in args for 
arg0 in shlex.split(str(arg))) def _invoke_sonobuoy(self, *args, **kwargs): @@ -69,16 +73,6 @@ def _sonobuoy_run(self): def _sonobuoy_delete(self): self._invoke_sonobuoy("delete", "--wait") - def _sonobuoy_status_result(self): - process = self._invoke_sonobuoy("status", "--json", capture_output=True) - json_data = json.loads(process.stdout) - counter = Counter() - for entry in json_data["plugins"]: - logger.debug(f"plugin {entry['plugin']}: {_fmt_result(entry['result-counts'])}") - for key, value in entry["result-counts"].items(): - counter[key] += value - return counter - def _eval_result(self, counter): """evaluate test results and return return code""" result_message = f"sonobuoy reports {_fmt_result(counter)}" @@ -99,7 +93,7 @@ def _sonobuoy_retrieve_result(self, plugin='e2e'): """ Invoke sonobuoy to retrieve results and to store them in a subdirectory of the working directory. Analyze the results yaml file for given `plugin` and - log each failure as ERROR. Return summary dict like `_sonobuoy_status_result`. + log each failure as ERROR. Return summary dict. """ logger.debug(f"retrieving results to {self.result_dir_name}") result_dir = os.path.join(self.working_directory, self.result_dir_name) @@ -108,21 +102,9 @@ def _sonobuoy_retrieve_result(self, plugin='e2e'): self._invoke_sonobuoy("retrieve", "-x", result_dir) yaml_path = os.path.join(result_dir, 'plugins', plugin, 'sonobuoy_results.yaml') logger.debug(f"parsing results from {yaml_path}") - with open(yaml_path, "r") as fileobj: - result_obj = yaml.load(fileobj.read(), yaml.SafeLoader) - counter = Counter() - for item1 in result_obj.get('items', ()): - # file ... - for item2 in item1.get('items', ()): - # suite ... - for item in item2.get('items', ()): - # testcase ... or so - status = item.get('status', 'skipped') - counter[status] += 1 - if status == 'failed': - logger.error(f"FAILED: {item['name']}") # <-- this is why this method exists! 
- logger.info(f"{plugin} results: {_fmt_result(counter)}") - return counter + ok_to_fail_regex_list = _load_ok_to_fail_regex_list(self.scs_sonobuoy_config_yaml) + + return sonobuoy_parse_result(plugin, yaml_path, ok_to_fail_regex_list) def run(self): """ @@ -132,16 +114,91 @@ def run(self): self._preflight_check() try: self._sonobuoy_run() - return_code = self._eval_result(self._sonobuoy_status_result()) + counter = self._sonobuoy_retrieve_result() + return_code = self._eval_result(counter) print(self.check_name + ": " + ("PASS", "FAIL")[min(1, return_code)]) - try: - self._sonobuoy_retrieve_result() - except Exception: - # swallow exception for the time being - logger.debug('problem retrieving results', exc_info=True) return return_code except BaseException: logger.exception("something went wrong") return 112 finally: self._sonobuoy_delete() + + +def sonobuoy_parse_result(plugin, sonobuoy_results_yaml_path, ok_to_fail_regex_list): + with open(sonobuoy_results_yaml_path, "r") as fileobj: + result_obj = yaml.load(fileobj.read(), yaml.SafeLoader) + + counter = Counter() + for item1 in result_obj.get("items", ()): + # file ... + for item2 in item1.get("items", ()): + # suite ... + for item in item2.get("items", ()): + # testcase ... 
or so + status = item.get("status", "skipped") + if status == "failed": + if ok_to_fail(ok_to_fail_regex_list, item["name"]): + status = "failed_ok" + else: + logger.error(f"FAILED: {item['name']}") + counter[status] += 1 + + logger.info(f"{plugin} results: {_fmt_result(counter)}") + return counter + + +def _load_ok_to_fail_regex_list(scs_sonobuoy_config_yaml): + with open(scs_sonobuoy_config_yaml, "r") as fileobj: + config_obj = yaml.load(fileobj.read(), yaml.SafeLoader) or {} + if not isinstance(config_obj, dict): + raise ValueError(f"Invalid sonobuoy config format in {scs_sonobuoy_config_yaml}: top-level YAML object must be a mapping") + allowed_top_level_keys = {"okToFail"} + unknown_top_level_keys = set(config_obj) - allowed_top_level_keys + if unknown_top_level_keys: + raise ValueError( + f"Invalid sonobuoy config format in {scs_sonobuoy_config_yaml}: unknown top-level keys: {sorted(unknown_top_level_keys)}" + ) + ok_to_fail_items = config_obj.get("okToFail", ()) + if not isinstance(ok_to_fail_items, list): + raise ValueError(f"Invalid sonobuoy config format in {scs_sonobuoy_config_yaml}: okToFail must be a list") + + ok_to_fail_regex_list = [] + for idx, entry in enumerate(ok_to_fail_items): + if not isinstance(entry, dict): + raise ValueError( + f"Invalid sonobuoy config format in {scs_sonobuoy_config_yaml}: okToFail[{idx}] must be a mapping" + ) + allowed_entry_keys = {"regex", "reason"} + unknown_entry_keys = set(entry) - allowed_entry_keys + if unknown_entry_keys: + raise ValueError( + f"Invalid sonobuoy config format in {scs_sonobuoy_config_yaml}: okToFail[{idx}] has unknown keys: {sorted(unknown_entry_keys)}" + ) + regex = entry.get("regex") + if not isinstance(regex, str) or not regex.strip(): + raise ValueError( + f"Invalid sonobuoy config format in {scs_sonobuoy_config_yaml}: okToFail[{idx}].regex must be a non-empty string" + ) + reason = entry.get("reason") + if not isinstance(reason, str) or not reason.strip(): + raise ValueError( + f"Invalid 
sonobuoy config format in {scs_sonobuoy_config_yaml}: okToFail[{idx}].reason must be a non-empty string" + ) + ok_to_fail_regex_list.append((re.compile(regex), reason)) + return ok_to_fail_regex_list + + +def ok_to_fail(ok_to_fail_regex_list, test_name): + name = test_name + for regex, _ in ok_to_fail_regex_list: + if re.search(regex, name): + return True + return False + + +def check_sonobuoy_result(scs_sonobuoy_config_yaml, result_yaml): + ok_to_fail_regex_list = _load_ok_to_fail_regex_list(scs_sonobuoy_config_yaml) + counter = sonobuoy_parse_result("", result_yaml, ok_to_fail_regex_list) + for key, value in counter.items(): + print(f"{key}: {value}") diff --git a/Tests/scs-compatible-kaas.yaml b/Tests/scs-compatible-kaas.yaml index ac7f0889e..74acdb1a7 100644 --- a/Tests/scs-compatible-kaas.yaml +++ b/Tests/scs-compatible-kaas.yaml @@ -11,7 +11,7 @@ variables: - kubeconfig scripts: - executable: ./kaas/sonobuoy_handler/run_sonobuoy.py - args: -k {kubeconfig} -r {subject_root}/sono-results-e2e -c 'cncf-k8s-conformance' -a '--mode=certified-conformance' + args: run -k {kubeconfig} --scs-sonobuoy-config kaas/scs-sonobuoy-config.yaml -r {subject_root}/sono-results-e2e -c 'cncf-k8s-conformance' -a '--mode=certified-conformance' #~ args: -k {kubeconfig} -r {subject_root}/sono-results -c 'cncf-k8s-conformance' -a '--plugin-env e2e.E2E_DRYRUN=true' testcases: - id: cncf-k8s-conformance @@ -31,7 +31,7 @@ scripts: description: Must fulfill all requirements of scs-0214-v2. 
url: https://docs.scs.community/standards/scs-0214-v2-k8s-node-distribution#decision - executable: ./kaas/sonobuoy_handler/run_sonobuoy.py - args: -k {kubeconfig} -r {subject_root}/sono-results-0219 -c 'kaas-networking-check' -a '--e2e-focus "NetworkPolicy"' + args: run -k {kubeconfig} --scs-sonobuoy-config kaas/scs-sonobuoy-config.yaml -r {subject_root}/sono-results-0219 -c 'kaas-networking-check' -a '--e2e-focus "NetworkPolicy"' testcases: - id: kaas-networking-check description: Must fulfill all requirements of scs-0219-v1.