Fix test discovery in checktests.py

checktests.py wasn't able to catch bug [1] because it did not take
inheritance into account during test discovery.

The cause of the issue is that checktests.py parsed the Python files
itself to discover the tests. This patch fixes that by using the
unittest library for test discovery instead.

[1] https://review.opendev.org/c/osf/interop/+/806178

Story: 2009146
Task: 43097
Change-Id: I6e1b11eeb3ca1915ca41b6af88f9f568e6d674eb
Author: lpiwowar (2021-08-30 16:17:18 +02:00)
parent 705b7841f4
commit cf024be535
2 changed files with 60 additions and 39 deletions
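For context, a minimal, self-contained sketch of why loader-based discovery catches inherited tests that a per-file AST scan attributes only to the base class. Module and class names below are hypothetical, not code from this patch:

# sketch_inherited_discovery.py
import unittest


class BaseShareTest(unittest.TestCase):
    def test_create_share(self):
        # The test method is defined only in the base class.
        self.assertTrue(True)


class ShareTest(BaseShareTest):
    # No test_* methods of its own; the test is inherited.
    pass


if __name__ == '__main__':
    # An AST scan of ShareTest's class body finds no test_* methods, so a
    # parser-based tool records the test only under BaseShareTest.
    # unittest.TestLoader resolves tests through the class hierarchy instead:
    loader = unittest.TestLoader()
    for test in loader.loadTestsFromTestCase(ShareTest):
        print(test.id())  # -> __main__.ShareTest.test_create_share

checktests.py applies the same idea per module: it calls loader.loadTestsFromName() on each submodule and records every TestCase the loader returns, including the ones inherited from base classes.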

File: tools/checktests.py

@@ -1,4 +1,5 @@
 # Copyright 2018, OpenStack Foundation
+# Copyright 2021, Red Hat, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
 # not use this file except in compliance with the License. You may obtain
@@ -13,12 +14,12 @@
 # under the License.

 import argparse
-import ast
 import importlib
 import json
 import os
 import re
 import sys
+import unittest


 def get_v1_version(guideline):
@@ -90,7 +91,7 @@ def get_submodules(parent_module_name):
         if not os.path.exists(os.path.join(root, '__init__.py')):
             continue
         for f in files:
-            if f.endswith('.py'):
+            if f.endswith('.py') and not f == "__init__.py":
                 module_path = root + '/' + f
                 module_name = root_name + '.' + os.path.splitext(f)[0]
                 if module_name not in submodules:
@@ -98,26 +99,42 @@ def get_submodules(parent_module_name):
     return submodules


-def get_tests(module_name):
-    submodules = get_submodules(module_name)
-    tests = {}
-    for module_name in submodules:
-        filename = submodules[module_name]
-        with open(filename, 'r') as f:
-            source = f.read()
-        parsed = ast.parse(source)
-        for node in parsed.body:
-            if node.__class__ is ast.ClassDef:
-                for classnode in node.body:
-                    if (classnode.__class__ is ast.FunctionDef and
-                            classnode.name.startswith('test_')):
-                        for decorator in classnode.decorator_list:
-                            if hasattr(decorator, 'func'):
-                                if decorator.func.attr == 'idempotent_id':
-                                    tests['id-' + decorator.args[0].s] = \
-                                        module_name + "." + node.name + "." + \
-                                        classnode.name
-    return tests
+def get_module_tests(tests, parsed_test):
+    if isinstance(tests, unittest.TestCase):
+        test_description = tests.id()
+        test_uuid_regex = r'id-\w{8}-\w{4}-\w{4}-\w{4}-\w{12}'
+        test_id = re.search(test_uuid_regex, test_description)
+
+        if not test_id:
+            return parsed_test
+
+        test_id = test_id.group(0)
+        test_name = test_description.split("[")[0]
+        test_list = parsed_test.get(test_id, [])
+        test_list.append(test_name)
+        parsed_test[test_id] = test_list
+
+        return parsed_test
+    elif not isinstance(tests, unittest.suite.TestSuite):
+        return
+
+    for test in tests:
+        parsed_test = get_module_tests(test, parsed_test)
+    return parsed_test
+
+
+def get_tests(submodules):
+    loader = unittest.TestLoader()
+    parsed_tests = {}
+    for submodule in submodules:
+        try:
+            tests = loader.loadTestsFromName(submodule)
+            parsed_tests = get_module_tests(tests, parsed_tests)
+        except Exception as e:
+            print("Unable to load: {}. Exception: {}".format(submodule, e))
+    return parsed_tests


 def run():
@@ -133,10 +150,10 @@ def run():
                              'against')
     args = parser.parse_args()
     guideline = load_guideline(args.guideline_file)
     required = get_required_tests(guideline)
-    tests = get_tests(args.testlib)
+    submodules = get_submodules(args.testlib)
+    lib_tests = get_tests(submodules)

     missing_uuids = []
     missing_tests = {}
@@ -144,10 +161,11 @@ def run():
     for test in required:
         uuid = test[0]
         testnames = test[1]
-        if uuid not in tests:
+        if uuid not in lib_tests:
            missing_uuids.append(test)
        else:
-            if tests[uuid] not in testnames:
+            in_testnames = [test in lib_tests[uuid] for test in testnames]
+            if not any(in_testnames):
                missing_tests[uuid] = test

     exit_code = 0
@@ -182,7 +200,7 @@ def run():
               " idempotent_id:\n"
               " %s\n"
               " names: " % (args.testlib,
-                            uuid, tests[uuid],
+                            uuid, lib_tests[uuid],
                             args.guideline_file,
                             missing_tests[uuid][0]))
         for testname in missing_tests[uuid][1]:
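For a quick illustration of the id parsing done by get_module_tests above, here is a small standalone snippet; the test id string is a made-up example in the usual tempest format, not one taken from a real plugin:

import re

test_uuid_regex = r'id-\w{8}-\w{4}-\w{4}-\w{4}-\w{12}'
# Hypothetical value, shaped like what TestCase.id() returns for tempest-style tests:
test_description = ('manila_tempest_tests.tests.api.test_shares.SharesNFSTest.'
                    'test_create_share[id-aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee]')

match = re.search(test_uuid_regex, test_description)
test_id = match.group(0)                    # 'id-aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee'
test_name = test_description.split("[")[0]  # '...SharesNFSTest.test_create_share'
print(test_id, test_name)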

File: (consistency-check shell script that installs the tempest plugins and runs checktests.py)

@@ -77,17 +77,21 @@ if [[ -z $SFSDIR ]]; then
     git clone https://opendev.org/openstack/manila-tempest-plugin $SFSDIR
 fi

+pip install $TEMPESTDIR
+pip install $DNSDIR
+pip install $ORCHESTRATIONDIR
+pip install $SFSDIR
 export PYTHONPATH=$TEMPESTDIR:$DNSDIR:$ORCHESTRATIONDIR:$SFSDIR

 python3 ./tools/checktests.py --guideline guidelines/next.json
 exit_1=$?
-python3 ./tools/checktests.py --guideline add-ons/guidelines/dns.next.json --testlib designate_tempest_plugin
-exit_2=$?
-# TODO(kopecmartin) In order to unblock gates, skip check of manila tempest plugin until the following bug is resolved:
-# https://storyboard.openstack.org/#!/story/2009146
-# python3 ./tools/checktests.py --guideline add-ons/guidelines/shared_file_system.next.json --testlib manila_tempest_tests
-# exit_3=$?
+# TODO(lpiwowar) The consistency check of designate_tempest_plugin is omitted until
+# https://bugs.launchpad.net/designate/+bug/1943115 is fixed.
+# python3 ./tools/checktests.py --guideline add-ons/guidelines/dns.next.json --testlib designate_tempest_plugin
+# exit_2=$?
+python3 ./tools/checktests.py --guideline add-ons/guidelines/shared_file_system.next.json --testlib manila_tempest_tests
+exit_3=$?
 # TODO(kopecmartin) consistency check of heat_tempest_plugin is omitted intentionally until we improve the
 # checktests.py so that it detects ids of the heat_tempest_plugin.api tests which don't use decorator.idempotent_id
 # call to track the id
@@ -96,10 +100,10 @@ exit_2=$?
 python3 ./tools/checktests.py --guideline current_guideline
 exit_5=$?
-python3 ./tools/checktests.py --guideline add-ons/dns_current_guideline --testlib designate_tempest_plugin
-exit_6=$?
-# python3 ./tools/checktests.py --guideline add-ons/shared_file_system_current_guideline --testlib manila_tempest_tests
-# exit_7=$?
+# python3 ./tools/checktests.py --guideline add-ons/dns_current_guideline --testlib designate_tempest_plugin
+# exit_6=$?
+python3 ./tools/checktests.py --guideline add-ons/shared_file_system_current_guideline --testlib manila_tempest_tests
+exit_7=$?
 # python3 ./tools/checktests.py --guideline add-ons/orchestration_current_guideline --testlib heat_tempest_plugin
 # exit_8=$?
@@ -111,6 +115,5 @@ if [[ "${CLEANTEMPEST}" ]]; then
     rm -rf $SFSDIR
 fi

 #! (( $exit_1 || $exit_2 || $exit_3 || $exit_4 || $exit_5 || $exit_6 || $exit_7 || $exit_8 ))
-! (( $exit_1 || $exit_2 || $exit_5 || $exit_6 ))
+! (( $exit_1 || $exit_3 || $exit_5 || $exit_7 ))