Merge "Revert "Convert apply_network_config.sh to Python and add automated tests""

Authored by Zuul on 2025-02-26 19:20:22 +00:00; committed by Gerrit Code Review.
commit 24a03f1ee6
21 changed files with 18 additions and 4550 deletions

.gitignore (vendored): 1 line changed

@@ -1,2 +1 @@
.tox
.stestr


@@ -7,13 +7,11 @@
- stx-puppet-linters
- stx-puppet-tox-pep8
- stx-puppet-tox-pylint
- puppet-manifests-tox-py39
gate:
jobs:
- stx-puppet-linters
- stx-puppet-tox-pep8
- stx-puppet-tox-pylint
- puppet-manifests-tox-py39
post:
jobs:
- stx-stx-puppet-upload-git-mirror
@@ -43,19 +41,6 @@
vars:
python_version: 3.9
- job:
name: puppet-manifests-tox-py39
parent: openstack-tox-py39
description: |
Run py39 test for puppet-manifests
nodeset: debian-bullseye
files:
- puppet-manifests/*
vars:
tox_envlist: py39
python_version: 3.9
tox_extra_args: -c puppet-manifests/tox.ini
- job:
name: stx-stx-puppet-upload-git-mirror
parent: upload-git-mirror


@@ -1,3 +0,0 @@
[DEFAULT]
test_path=./tests
top_dir=./


@@ -14,7 +14,6 @@ ifdef ignore_puppet_warnings
else
install -m 755 -D bin/puppet-manifest-apply.sh $(BINDIR)/puppet-manifest-apply.sh
endif
install -m 755 -D bin/apply_network_config.py $(BINDIR)/apply_network_config.py
install -m 755 -D bin/apply_network_config.sh $(BINDIR)/apply_network_config.sh
install -m 755 -D bin/k8s_wait_for_endpoints_health.py $(BINDIR)/k8s_wait_for_endpoints_health.py
install -m 755 -D bin/kube-wait-control-plane-terminated.sh $(BINDIR)/kube-wait-control-plane-terminated.sh

File diff suppressed because it is too large.


@@ -7,8 +7,6 @@
#
################################################################################
# WARNING: This file is OBSOLETE, use apply_network_config.py instead
#
# Purpose of this script is to copy the puppet-built ifcfg-* network config
# files from the PUPPET_DIR to the ETC_DIR. Only files that are detected as
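
The header comment above captures the behaviour both this shell script and the removed Python port implement: copy the puppet-built ifcfg-* files into the live configuration directory, but only those whose contents actually changed. A minimal, self-contained sketch of that idea (the directory paths are assumptions for the example, not the script's real values):

import filecmp
import glob
import os
import shutil

PUPPET_DIR = "/var/run/network-scripts.puppet"   # assumed path, for illustration
ETC_DIR = "/etc/network/interfaces.d"            # assumed path, for illustration

def copy_changed_configs():
    """Copy puppet-built ifcfg-* files whose contents differ from the live copy."""
    changed = []
    for src in sorted(glob.glob(os.path.join(PUPPET_DIR, "ifcfg-*"))):
        dst = os.path.join(ETC_DIR, os.path.basename(src))
        # Replace the live file only when it is missing or its contents differ.
        if not os.path.isfile(dst) or not filecmp.cmp(src, dst, shallow=False):
            shutil.copy2(src, dst)
            changed.append(os.path.basename(src))
    return changed

if __name__ == "__main__":
    print("changed:", copy_changed_configs())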


@@ -62,10 +62,10 @@ def k8s_wait_for_endpoints_health(tries=TRIES, try_sleep=TRY_SLEEP, timeout=TIME
healthz_endpoints = [APISERVER_READYZ_ENDPOINT, CONTROLLER_MANAGER_HEALTHZ_ENDPOINT,
SCHEDULER_HEALTHZ_ENDPOINT, KUBELET_HEALTHZ_ENDPOINT]
for endpoint in healthz_endpoints:
is_k8s_endpoint_healthy = kubernetes.k8s_health_check(tries=tries,
try_sleep=try_sleep,
timeout=timeout,
healthz_endpoint=endpoint)
is_k8s_endpoint_healthy = kubernetes.k8s_health_check(tries = tries,
try_sleep = try_sleep,
timeout = timeout,
healthz_endpoint = endpoint)
if not is_k8s_endpoint_healthy:
LOG.error("Timeout: Kubernetes control-plane endpoints not healthy")
return 1
@@ -93,3 +93,4 @@ def main():
if __name__ == "__main__":
sys.exit(main())
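
The hunk above restores the original spacing in the calls to kubernetes.k8s_health_check, which is invoked once per control-plane healthz endpoint. That helper is not part of this diff, so the following is only a rough sketch of the retry pattern it stands for, under the assumption that it polls an HTTP healthz/readyz URL:

import time
import urllib.request

def poll_healthz(healthz_endpoint, tries=20, try_sleep=5, timeout=10):
    """Return True once the endpoint answers 200, False after all tries."""
    for _ in range(tries):
        try:
            with urllib.request.urlopen(healthz_endpoint, timeout=timeout) as resp:
                if resp.status == 200:
                    return True
        except OSError:
            pass  # endpoint not reachable yet; retry after a short sleep
        time.sleep(try_sleep)
    return False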


@@ -5,8 +5,6 @@
#
################################################################################
# WARNING: This file is OBSOLETE
#
# This file purpose is to provide helper functions if the system is Debian based
# for the apply_network_config.sh script


@@ -5,8 +5,6 @@
#
################################################################################
# WARNING: This file is OBSOLETE
#
# This file purpose is to provide helper functions if the system is CentOS based
# for the apply_network_config.sh script


@@ -28,7 +28,6 @@ import sys
BOOT_ENV = "/boot/efi/EFI/BOOT/boot.env"
KERNEL_PARAMS_STRING = "kernel_params"
# Get value of kernel_params from conf
def read_kernel_params(conf):
"""Get value of kernel_params from conf"""
@@ -47,7 +46,6 @@ def read_kernel_params(conf):
return res
# Write key=value string to conf
def write_conf(conf, string):
"""Write key=value string to conf"""
@@ -61,7 +59,6 @@ def write_conf(conf, string):
print(err)
raise
def set_parser():
"""Set command parser"""
@@ -113,7 +110,6 @@ def set_parser():
return parser
def convert_dict_to_value(kernel_params_dict):
"""Dictionary to value"""
@@ -132,7 +128,6 @@ def convert_dict_to_value(kernel_params_dict):
return f"kernel_params={kernel_params}"
def convert_value_to_dict(value):
"""Value to dictionary"""
@@ -161,6 +156,7 @@ def convert_value_to_dict(value):
else:
key, val = param, ''
kernel_params_dict[key] = val
if hugepage_cache:
@@ -181,7 +177,6 @@ def convert_value_to_dict(value):
return kernel_params_dict
def edit_boot_env(args):
"""Edit boot environment"""
@@ -217,7 +212,6 @@ def edit_boot_env(args):
kernel_params = convert_dict_to_value(kernel_params_dict)
write_conf(BOOT_ENV, kernel_params)
def get_kernel_dir():
"""Get kernel directory"""
@@ -229,12 +223,11 @@ def get_kernel_dir():
return "/boot/1"
def edit_kernel_env(args):
"""Edit kernel environment"""
kernel_dir = get_kernel_dir()
path_all = os.path.join(kernel_dir, "vmlinuz*-amd64")
path_all = os.path.join(kernel_dir,"vmlinuz*-amd64")
path_rt = os.path.join(kernel_dir, "vmlinuz*rt*-amd64")
glob_all_kernels = [os.path.basename(f) for f in glob.glob(path_all)]
@@ -261,7 +254,6 @@ def edit_kernel_env(args):
kernel_rollback_env = f"kernel_rollback={kernel}"
write_conf(kernel_env, kernel_rollback_env)
def list_kernels():
"""List kernels"""
@@ -280,7 +272,6 @@ def list_kernels():
print(output)
def list_kernel_params():
"""List kernel params"""
@@ -297,7 +288,6 @@ def list_kernel_params():
print(line)
break
def main():
"""Main"""
parser = set_parser()
@@ -315,6 +305,5 @@ def main():
if args.list_kernel_params:
list_kernel_params()
if __name__ == "__main__":
main()
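
The hunks above only revert blank-line and whitespace changes in the kernel environment tool, but its convert_value_to_dict and convert_dict_to_value helpers are easier to follow with a simplified round-trip between the kernel_params string and a dictionary. This sketch is an illustration only; it ignores the hugepage-ordering handling visible in the real code and is not taken from it:

import shlex

def kernel_params_to_dict(value):
    """'kernel_params=quiet hugepagesz=1G' -> {'quiet': '', 'hugepagesz': '1G'}"""
    params = value.split("=", 1)[1] if value.startswith("kernel_params=") else value
    result = {}
    for param in shlex.split(params):
        key, _, val = param.partition("=")
        result[key] = val
    return result

def dict_to_kernel_params(params_dict):
    """Inverse of the above: rebuild the kernel_params=... string."""
    parts = [f"{k}={v}" if v else k for k, v in params_dict.items()]
    return "kernel_params=" + " ".join(parts)

print(dict_to_kernel_params(kernel_params_to_dict(
    "kernel_params=quiet hugepagesz=1G isolcpus=1-3")))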


@@ -729,15 +729,14 @@ def get_kubelet_cfg_from_service_parameters(service_params):
# map[string]string & []string
if value.startswith(('[', '{')) and value.endswith((']', '}')):
try:
value = json.loads(
value.replace('True', 'true').replace('False', 'false').replace("'", '"'))
value = json.loads(value.replace('True', 'true').replace('False', 'false').replace("'", '"'))
except Exception as e:
msg = 'Parsing param: %s / value: %s. [Error: %s]' % (param, value, e)
LOG.error(msg)
return 3
# bool
elif value in ['False', 'false'] or value in ['True', 'true']:
value = True if value in ['True', 'true'] else False # pylint: disable-msg=simplifiable-if-expression # noqa: E501
value = True if value in ['True', 'true'] else False # pylint: disable-msg=simplifiable-if-expression
# float
elif '.' in value:
try:
@@ -1158,8 +1157,7 @@ def main():
parser.add_argument("--kubelet_latest_config_file", default="/var/lib/kubelet/config.yaml")
parser.add_argument("--kubelet_bak_config_file", default="/var/lib/kubelet/config.yaml.bak")
parser.add_argument("--kubelet_error_log", default="/tmp/kubelet_errors.log")
parser.add_argument("--k8s_configmaps_init_flag",
default="/tmp/.sysinv_k8s_configmaps_initialized")
parser.add_argument("--k8s_configmaps_init_flag", default="/tmp/.sysinv_k8s_configmaps_initialized")
parser.add_argument("--automatic_recovery", default=True)
parser.add_argument("--timeout", default=RECOVERY_TIMEOUT)
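
The first hunk in this file reverts line wrapping inside the service-parameter type coercion. Restated on its own for readability (an illustration, not verbatim module code), the coercion tries JSON for list and map values after normalizing Python-style literals, then booleans, then floats:

import json

def coerce_service_param(value):
    """Best-effort conversion of a string service parameter to a Python value."""
    # map[string]string and []string values arrive as JSON-ish strings.
    if value.startswith(('[', '{')) and value.endswith((']', '}')):
        return json.loads(
            value.replace('True', 'true').replace('False', 'false').replace("'", '"'))
    # Booleans.
    if value in ('True', 'true', 'False', 'false'):
        return value in ('True', 'true')
    # Floats, e.g. eviction thresholds such as "0.9".
    if '.' in value:
        try:
            return float(value)
        except ValueError:
            return value
    return value

print(coerce_service_param("{'memory': '100Mi'}"))  # -> {'memory': '100Mi'}
print(coerce_service_param("true"), coerce_service_param("0.9"))  # -> True 0.9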


@@ -800,7 +800,7 @@ class platform::network::apply {
-> Exec['apply-network-config']
exec {'apply-network-config':
command => 'apply_network_config.py',
command => 'apply_network_config.sh',
}
# Wait for network interface to leave tentative state during ipv6 DAD, if interface is UP
@@ -890,7 +890,7 @@ class platform::network::routes::runtime {
}
exec {'apply-network-config route setup':
command => 'apply_network_config.py --routes',
command => 'apply_network_config.sh --routes',
}
}


@@ -1,3 +0,0 @@
mock>=2.0.0
stestr>=1.0.0
netaddr


@@ -1,5 +0,0 @@
#
# Copyright (c) 2025 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


@@ -1,318 +0,0 @@
#
# Copyright (c) 2025 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import io
# Keys for filesystem node properties
PARENT = "parent"
TYPE = "type"
FILE = "file"
DIR = "dir"
LINK = "link"
CONTENTS = "contents"
TARGET = "target"
REF = "ref"
LISTENERS = "listeners"
class FilesystemMockError(BaseException):
pass
class FileMock():
def __init__(self, fs, entry):
self.fs = fs
self.entry = entry
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def readlines(self):
lines = self.entry[CONTENTS].split("\n")
out_lines = [line + "\n" for line in lines[:-1]]
if len(lines[-1]) > 0:
out_lines.append(lines[-1])
return out_lines
def read(self):
return self.entry[CONTENTS]
def write(self, contents):
if REF not in self.entry:
raise io.UnsupportedOperation("not writable")
self.entry[CONTENTS] += contents
class ReadOnlyFileContainer():
def __init__(self, contents=None):
self.next_id = 0
self.root = self._get_new_dir(None)
if contents:
self.batch_add(contents)
def batch_add(self, contents):
for path, data in contents.items():
if data is None:
self._add_dir(path)
elif type(data) == str:
self._add_file(path, data)
elif type(data) == tuple and len(data) == 1 and type(data[0]) == str:
self._add_link(path, data[0])
else:
raise FilesystemMockError("Invalid entry, must be None for directory, "
"str for file or tuple with 1 str element for link")
def get_root_node(self):
return self.root
@staticmethod
def _get_new_dir(parent):
return {PARENT: parent, TYPE: DIR, CONTENTS: dict()}
@staticmethod
def _get_new_file(parent, contents):
return {PARENT: parent, TYPE: FILE, CONTENTS: contents}
@staticmethod
def _get_new_link(parent, entry, target_path):
return {PARENT: parent, TYPE: LINK, CONTENTS: entry, TARGET: target_path}
def _do_add_dir(self, path_pieces):
def add_dir_rec(parent, pieces):
if len(pieces) == 0:
return parent
current = parent[CONTENTS].get(pieces[0], None)
if not current:
current = self._get_new_dir(parent)
parent[CONTENTS][pieces[0]] = current
return add_dir_rec(current, pieces[1:])
return add_dir_rec(self.root, path_pieces)
def _get_entry(self, path):
pieces = path.split("/")[1:]
def get_entry_rec(parent, pieces):
if len(pieces) == 0:
return parent
current = parent[CONTENTS].get(pieces[0], None)
if not current:
raise FilesystemMockError(f"Path not found: '{path}'")
return get_entry_rec(current, pieces[1:])
return get_entry_rec(self.root, pieces)
def _add_dir(self, path):
pieces = path.split("/")[1:]
self._do_add_dir(pieces)
def _add_file(self, path, contents):
pieces = path.split("/")[1:]
new_dir = self._do_add_dir(pieces[:-1])
file_entry = self._get_new_file(new_dir, contents)
new_dir[CONTENTS][pieces[-1]] = file_entry
def _add_link(self, path, ref_path):
pieces = path.split("/")[1:]
new_dir = self._do_add_dir(pieces[:-1])
ref_entry = self._get_entry(ref_path)
link_entry = self._get_new_link(new_dir, ref_entry, ref_path)
new_dir[CONTENTS][pieces[-1]] = link_entry
class FilesystemMock():
def __init__(self, contents: dict = None, fs: ReadOnlyFileContainer = None):
if fs is not None:
self.fs = fs
add_contents = True
else:
self.fs = ReadOnlyFileContainer(contents)
add_contents = False
self.root = self._get_new_entry(self.fs.get_root_node(), None)
if add_contents and contents:
self.batch_add(contents)
def batch_add(self, contents):
for path, data in contents.items():
if data is None:
self.create_directory(path)
elif type(data) == str:
self.set_file_contents(path, data)
elif type(data) == tuple and len(data) == 1 and type(data[0]) == str:
self.set_link_contents(path, data[0])
else:
raise FilesystemMockError("Invalid entry, must be None for directory, "
"str for file or tuple with 1 str element for link")
@staticmethod
def _get_new_entry(ref, parent, node_type=None):
if not node_type:
node_type = ref[TYPE]
entry = {REF: ref, PARENT: parent, TYPE: node_type}
if node_type == DIR:
entry[CONTENTS] = ref[CONTENTS].copy() if ref else dict()
elif node_type == LINK:
entry[CONTENTS] = ref[CONTENTS] if ref else None
entry[TARGET] = ref[TARGET] if ref else None
else:
entry[CONTENTS] = ''
return entry
def _get_entry(self, path, translate_link=False):
pieces = path.split("/")[1:]
def get_entry_rec(contents, pieces):
if len(pieces) == 0:
if translate_link and contents[TYPE] == LINK:
return contents[CONTENTS]
return contents
if contents[TYPE] == LINK:
contents = contents[CONTENTS]
if REF in contents and contents[CONTENTS] is None:
child = contents[REF][CONTENTS].get(pieces[0], None)
else:
child = contents[CONTENTS].get(pieces[0], None)
if child is None:
return None
return get_entry_rec(child, pieces[1:])
return get_entry_rec(self.root, pieces)
def _patch_entry(self, path, node_type):
pieces = path.split("/")[1:]
def translate_link(entry):
target = entry[CONTENTS]
if REF not in target:
target = self._patch_entry(entry[TARGET], target[TYPE])
entry[CONTENTS] = target
return target
def patch_entry_rec(level, entry, pieces):
if len(pieces) == 0:
if entry[TYPE] == LINK and node_type != LINK:
entry = translate_link(entry)
if entry[TYPE] != node_type:
if node_type == FILE:
raise IsADirectoryError(f"[Errno 21] Is a directory: '{path}'")
raise NotADirectoryError(f"[Errno 20] Not a directory: '{path}'")
return entry
if entry[TYPE] == LINK:
entry = translate_link(entry)
if entry[TYPE] != DIR:
raise NotADirectoryError(f"[Errno 20] Not a directory: '{path}'")
if entry[CONTENTS] is None:
entry[CONTENTS] = entry[REF][CONTENTS].copy()
child = entry[CONTENTS].get(pieces[0], None)
if child is None or REF not in child:
if child is None:
new_type = node_type if len(pieces) == 1 else DIR
child = self._get_new_entry(None, entry, new_type)
else:
child = self._get_new_entry(child, entry)
entry[CONTENTS][pieces[0]] = child
return patch_entry_rec(level + 1, child, pieces[1:])
return patch_entry_rec(0, self.root, pieces)
def exists(self, path):
entry = self._get_entry(path)
return entry is not None
def isfile(self, path):
entry = self._get_entry(path)
return entry and entry[TYPE] == FILE
def isdir(self, path):
entry = self._get_entry(path)
return entry and entry[TYPE] == DIR
def islink(self, path):
entry = self._get_entry(path)
return entry and entry[TYPE] == LINK
def open(self, path, mode="r"):
if "w" in mode:
entry = self._patch_entry(path, FILE)
else:
entry = self._get_entry(path, translate_link=True)
if not entry:
raise FileNotFoundError(f"[Errno 2] No such file or directory: '{path}'")
if entry[TYPE] == DIR:
raise IsADirectoryError(f"[Errno 21] Is a directory: '{path}'")
if "w" in mode:
self._call_listeners(entry)
return FileMock(self, entry)
def _call_listeners(self, entry):
if parent := entry[PARENT]:
self._call_listeners(parent)
if listeners := entry.get(LISTENERS, None):
for listener in listeners:
listener()
def create_directory(self, path):
entry = self._patch_entry(path, DIR)
self._call_listeners(entry)
def set_file_contents(self, path, contents):
entry = self._patch_entry(path, FILE)
entry[CONTENTS] = contents
self._call_listeners(entry)
def get_file_contents(self, path):
entry = self._get_entry(path, translate_link=True)
if entry is None:
raise FilesystemMockError("Path does not exist")
if entry[TYPE] != FILE:
raise FilesystemMockError("Path is not a file")
return entry[CONTENTS]
def set_link_contents(self, link_path, target_path):
target = self._get_entry(target_path)
if target is None:
raise FilesystemMockError("Target path does not exist")
entry = self._patch_entry(link_path, LINK)
entry[CONTENTS] = target
entry[TARGET] = target_path
self._call_listeners(entry)
def get_file_list(self, path):
entry = self._get_entry(path, translate_link=True)
if entry is None:
raise FilesystemMockError("Path does not exist")
if entry[TYPE] != DIR:
raise FilesystemMockError("Path is not a directory")
files = []
for name, child in entry[CONTENTS].items():
if child[TYPE] == FILE:
files.append(name)
files.sort()
return files
def add_listener(self, path, callback):
entry = self._get_entry(path, translate_link=True)
if entry is None:
raise FilesystemMockError("Path does not exist")
if REF not in entry:
entry = self._patch_entry(path, entry[TYPE])
listeners = entry.setdefault(LISTENERS, list())
listeners.append(callback)
def delete(self, path):
pieces = path.split("/")[1:]
entry = self._get_entry(path)
if entry is None:
raise FileNotFoundError(f"[Errno 2] No such file or directory: '{path}'")
pieces = path.split("/")
patched_entry = self._patch_entry("/".join(pieces[:-1]), DIR)
patched_entry[CONTENTS].pop(pieces[-1])
self._call_listeners(patched_entry)
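
For context on what this removed test helper provided, here is a sketch of how FilesystemMock could be driven from a test, using only the methods defined above; the paths and contents are invented for the example:

fs = FilesystemMock({
    "/etc/network": None,                                      # directory
    "/etc/network/ifcfg-eth0": "BOOTPROTO=dhcp\n",             # regular file
    "/etc/network/ifcfg-mgmt": ("/etc/network/ifcfg-eth0",),   # symlink
})

assert fs.isdir("/etc/network")
assert fs.islink("/etc/network/ifcfg-mgmt")
assert fs.get_file_contents("/etc/network/ifcfg-mgmt") == "BOOTPROTO=dhcp\n"

writes = []
fs.add_listener("/etc/network/ifcfg-eth0", lambda: writes.append("touched"))

with fs.open("/etc/network/ifcfg-eth0", "w") as f:
    f.write("BOOTPROTO=static\n")

assert fs.get_file_contents("/etc/network/ifcfg-eth0") == "BOOTPROTO=static\n"
assert fs.get_file_list("/etc/network") == ["ifcfg-eth0"]  # links are not listed
assert writes  # the listener fired when the file was opened for writing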


@@ -1,36 +0,0 @@
#!/bin/bash
################################################################################
# Copyright (c) 2025 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
################################################################################
#
# This script is used by the automated tests
# tests.test_apply_network_config.GeneralTests.test_execute_system_cmd_timeout_*, it simulates a
# command that takes too long to terminate and triggers a timeout. In certain situations, the ifup
# command can exhibit this behavior.
#
return_code=$1
extra_sleep=$2
terminate()
{
echo "< SIGTERM RECEIVED >"
if [[ "$extra_sleep" == "-e" ]]; then
sleep 10
echo "< AFTER EXTRA SLEEP >"
fi
exit $return_code
}
trap terminate 15
echo "< BEFORE SLEEP >"
sleep 10
echo "< AFTER SLEEP >"
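
The script above exists only to misbehave: it traps SIGTERM, optionally sleeps again when passed "-e", and exits with a caller-chosen code. A hypothetical illustration of the timeout handling such a command exercises (this is not the removed test code; the function, script name, and timeouts are assumptions):

import subprocess

def execute_with_timeout(cmd, timeout=2, term_grace=2):
    """Run cmd, SIGTERM it on timeout, SIGKILL it if it still will not exit."""
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, text=True)
    try:
        out, _ = proc.communicate(timeout=timeout)
    except subprocess.TimeoutExpired:
        proc.terminate()              # SIGTERM; the trap above catches this
        try:
            out, _ = proc.communicate(timeout=term_grace)
        except subprocess.TimeoutExpired:
            proc.kill()               # the "-e" extra-sleep case ends up here
            out, _ = proc.communicate()
    return proc.returncode, out

# Hypothetical invocation; the real script's filename is not shown in this diff:
# execute_with_timeout(["./sleepy_command.sh", "3", "-e"])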

File diff suppressed because it is too large.


@@ -10,26 +10,12 @@
# and then run "tox" from this directory.
[tox]
toxworkdir = /tmp/{env:USER}_puppet-manifests
envlist = py39,puppetlint
envlist = puppetlint
skipsdist = True
[testenv]
recreate = True
[testenv:py39]
basepython = python3.9
sitepackages = False
setenv = VIRTUAL_ENV={envdir}
OS_TEST_PATH=./tests
deps =
-r{toxinidir}/test-requirements.txt
commands =
stestr run {posargs}
stestr slowest
[testenv:puppetlint]
# Note: centos developer env requires ruby-devel
# Ubuntu developer env requires ruby-dev


@@ -123,10 +123,8 @@ enable=E1603,E1609,E1610,E1602,E1606,E1608,E1607,E1605,E1604,E1601,E1611,W1652,
# See "Messages Control" section of
# https://pylint.readthedocs.io/en/latest/user_guide
# We are disabling (C)onvention
# W0201: attribute-defined-outside-init
# W1202: logging-format-interpolation
# W1618: no-absolute-import
disable=C, W0201,W1202,W1618
disable=C, W1618
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs


@@ -4,5 +4,3 @@ bashate >= 0.2
bandit!=1.6.0,>=1.1.0,<2.0.0;python_version>="3.0" # GPLv2
shellcheck-py;python_version>="3.0" # MIT
netaddr >= 0.7.19
mock>=2.0.0
testtools>=1.4.0

tox.ini: 14 lines changed

@@ -141,7 +141,7 @@ description =
commands =
flake8 puppet-manifests
flake8 puppet-manifests/src/modules/platform/files
[testenv:pylint]
basepython = python3
@@ -155,17 +155,11 @@ commands =
[flake8]
# E123, E125 skipped as they are invalid PEP-8.
# E126 continuation line over-indented for hanging indent
# E127 continuation line over-indented for visual indent
# H104: File contains nothing but comments
# H306: imports not in alphabetical order
# H404: multi line docstring should start without a leading new line
# E501 skipped because some of the code files include templates
# that end up quite wide
# H405: multi line docstring summary not separated with an empty line
# W504: line break after binary operator
show-source = True
ignore = E123,E125,E126,E127,H104,H306,H404,H405,W504
# Max line length set to 100 to coincide with opendev's code view width
max-line-length = 100
ignore = E123,E125,E501,H405,W504
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,release-tag-*
[testenv:bandit]