Fork cgcs-patch package as sw-patch for Debian
The original cgcs-patch is RPM-based, which requires a complete
re-write to work on ostree/dpkg systems like Debian.

The code has been forked, since the older CentOS environment and
Python 2.7 are end-of-life. Forking the code allows all new
development to proceed without re-testing on CentOS.

The debian folder under cgcs-patch has been moved under sw-patch.
Renaming and refactoring will be done in later commits.

pylint is unclamped in order to work on Python 3.9, and some minor
pylint suppressions have been added.

Test Plan:
- Verify that this builds on Debian
- Verify that the ISO installs the new content on Debian without
  breaking packages that import cgcs_patch
- Verify the patching service runs on Debian

Co-Authored-By: Jessica Castelino <jessica.castelino@windriver.com>
Story: 2009101
Task: 43076
Signed-off-by: Al Bailey <al.bailey@windriver.com>
Change-Id: I3f1bca749404053bae63d4bcc9fb2477cf909fcd
parent 0f682de55f
commit be09ccc584
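The Test Plan items can also be exercised locally against the forked tree. A minimal sketch, assuming tox and Python 3.9 are available; the environment names and -c paths match the new Zuul job definitions in the diff below:

    tox -c sw-patch/cgcs-patch/tox.ini -e pylint
    tox -c sw-patch/cgcs-patch/tox.ini -e py39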
.zuul.yaml (34 lines changed)

@@ -15,6 +15,8 @@
         - patch-alarm-tox-pylint
         - patch-alarm-tox-py27
         - patch-alarm-tox-py39
+        - sw-patch-tox-pylint
+        - sw-patch-tox-py39
     gate:
       jobs:
         - openstack-tox-linters
@@ -26,6 +28,8 @@
         - patch-alarm-tox-pylint
         - patch-alarm-tox-py27
         - patch-alarm-tox-py39
+        - sw-patch-tox-pylint
+        - sw-patch-tox-py39
     post:
       jobs:
         - stx-update-upload-git-mirror
@@ -79,7 +83,6 @@
     required-projects:
       - starlingx/config
       - starlingx/fault
-      - starlingx/root
     files:
       - cgcs-patch/cgcs-patch/*
     vars:
@@ -87,6 +90,34 @@
         python_version: 3.9
         tox_extra_args: -c cgcs-patch/cgcs-patch/tox.ini
 
+- job:
+    name: sw-patch-tox-py39
+    parent: tox-py39
+    nodeset: debian-bullseye
+    required-projects:
+      - starlingx/config
+      - starlingx/fault
+    files:
+      - sw-patch/cgcs-patch/*
+    vars:
+        tox_envlist: py39
+        python_version: 3.9
+        tox_extra_args: -c sw-patch/cgcs-patch/tox.ini
+
+- job:
+    name: sw-patch-tox-pylint
+    parent: tox
+    nodeset: debian-bullseye
+    required-projects:
+      - starlingx/config
+      - starlingx/fault
+    files:
+      - sw-patch/cgcs-patch/*
+    vars:
+        tox_envlist: pylint
+        python_version: 3.9
+        tox_extra_args: -c sw-patch/cgcs-patch/tox.ini
+
 
 - job:
     name: patch-alarm-tox-pylint
@@ -121,7 +152,6 @@
     required-projects:
       - starlingx/config
       - starlingx/fault
-      - starlingx/root
     files:
       - patch-alarm/patch-alarm/*
     vars:

@@ -1,5 +1,7 @@
 # This is a cross-platform list tracking distribution packages needed for install and tests;
 # see https://docs.openstack.org/infra/bindep/ for additional information.
+# Do not install python2 rpms in a python3 only environment such as debian-bullseye
 
+python-rpm [platform:dpkg !platform:debian-bullseye]
 python3-rpm [platform:dpkg]
 rpm-python [platform:rpm]

@@ -1,3 +1,3 @@
-cgcs-patch
 enable-dev-patch
 patch-alarm
+sw-patch

@@ -3,9 +3,12 @@
 # process, which may cause wedges in the gate later.
 
 hacking>=1.1.0,<=2.0.0 # Apache-2.0
+astroid <= 2.2.5
 coverage!=4.4,>=4.0 # Apache-2.0
 mock>=2.0.0 # BSD
 stestr>=1.0.0 # Apache-2.0
 testtools>=2.2.0 # MIT
 pycryptodomex
+isort<5;python_version>="3.0"
+pylint<2.1.0;python_version<"3.0" # GPLv2
+pylint<2.4.0;python_version>="3.0" # GPLv2

@@ -28,7 +28,7 @@ setenv = VIRTUAL_ENV={envdir}
 passenv =
     XDG_CACHE_HOME
 
-sitepackages = False
+sitepackages = True
 install_command = pip install \
     -v -v -v \
     -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/openstack/requirements/raw/branch/stable/stein/upper-constraints.txt} \
@@ -64,7 +64,6 @@ install_command = pip install \
     -v -v -v \
     -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/starlingx/root/raw/branch/master/build-tools/requirements/debian/upper-constraints.txt} \
     {opts} {packages}
-    {opts} {packages}
 
 deps = -r{toxinidir}/requirements.txt
        -r{toxinidir}/test-requirements.txt
@@ -110,17 +109,13 @@ basepython = python3
 deps = {[testenv]deps}
        flake8-bugbear
 usedevelop = False
-#skip_install = True
 commands =
     flake8 {posargs} .
 
 [testenv:pylint]
+basepython = python3
 deps = {[testenv]deps}
        pylint
-
-basepython = python2.7
-sitepackages = False
-
 commands = pylint patch_alarm --rcfile=./pylint.rc
 
 [testenv:cover]

sw-patch/bin/make_patch (new executable file, 15 lines)

#!/usr/bin/env python
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


import sys

from cgcs_make_patch.make_patch_functions import make_patch

if __name__ == "__main__":
    sys.exit(make_patch())

sw-patch/bin/modify_patch (new executable file, 15 lines)

#!/usr/bin/env python
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


import sys

from cgcs_make_patch.make_patch_functions import modify_patch

if __name__ == "__main__":
    sys.exit(modify_patch())

sw-patch/bin/patch-functions (new file, 52 lines)

#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

#
# This bash source file provides variables and functions that
# may be used by in-service patching scripts.
#

# Source platform.conf, for nodetype and subfunctions
. /etc/platform/platform.conf

declare PATCH_SCRIPTDIR=/run/patching/patch-scripts
declare PATCH_FLAGDIR=/run/patching/patch-flags
declare -i PATCH_STATUS_OK=0
declare -i PATCH_STATUS_FAILED=1

declare logfile=/var/log/patching.log
declare NAME=$(basename $0)

function loginfo()
{
    echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}

function is_controller()
{
    [[ $nodetype == "controller" ]]
}

function is_worker()
{
    [[ $nodetype == "worker" ]]
}

function is_storage()
{
    [[ $nodetype == "storage" ]]
}

function is_cpe()
{
    [[ $nodetype == "controller" && $subfunction =~ worker ]]
}

function is_locked()
{
    test -f /var/run/.node_locked
}

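As a usage sketch (not part of this commit), an in-service patch script dropped into /run/patching/patch-scripts might build on patch-functions as follows; the restarted service is hypothetical:

    #!/bin/bash
    # Hypothetical in-service patch script using patch-functions
    . /etc/patching/patch-functions

    if is_controller
    then
        loginfo "Applying in-service patch actions on controller"
        # restart of an affected service would go here (hypothetical)
    fi

    exit $PATCH_STATUS_OK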
sw-patch/bin/patch-tmpdirs.conf (new file, 2 lines)

d /run/patching 0700 root root -

sw-patch/bin/patch_build (new executable file, 16 lines)

#!/usr/bin/env python

"""
Copyright (c) 2014 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import sys

from cgcs_patch.patch_functions import patch_build

if __name__ == "__main__":
    sys.exit(patch_build())

sw-patch/bin/patch_check_goenabled.sh (new file, 27 lines)

#!/bin/bash
#
# Copyright (c) 2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

# Patching "goenabled" check.
# If a patch has been applied on this node, it is now out-of-date and should be rebooted.

NAME=$(basename $0)
SYSTEM_CHANGED_FLAG=/var/run/node_is_patched

logfile=/var/log/patching.log

function LOG {
    logger "$NAME: $*"
    echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}

if [ -f $SYSTEM_CHANGED_FLAG ]; then
    LOG "Node has been patched. Failing goenabled check."
    exit 1
fi

exit 0

sw-patch/bin/patching.conf (new file, 7 lines)

[runtime]
controller_multicast = 239.1.1.3
agent_multicast = 239.1.1.4
api_port = 5487
controller_port = 5488
agent_port = 5489

sw-patch/bin/patching.logrotate (new file, 15 lines)

/var/log/patching.log
/var/log/patching-api.log
/var/log/patching-insvc.log
{
    nodateext
    size 10M
    start 1
    rotate 10
    missingok
    notifempty
    compress
    delaycompress
    copytruncate
}

sw-patch/bin/pmon-sw-patch-agent.conf (new file, 19 lines)

[process]
process      = sw-patch-agent
pidfile      = /var/run/sw-patch-agent.pid
script       = /etc/init.d/sw-patch-agent
style        = lsb            ; ocf or lsb
severity     = major          ; Process failure severity
                              ;     critical : host is failed
                              ;     major    : host is degraded
                              ;     minor    : log is generated
restarts     = 3              ; Number of back to back unsuccessful restarts before severity assertion
interval     = 5              ; Number of seconds to wait between back-to-back unsuccessful restarts
debounce     = 20             ; Number of seconds the process needs to run before declaring
                              ; it as running O.K. after a restart.
                              ; Time after which back-to-back restart count is cleared.
startuptime  = 10             ; Seconds to wait after process start before starting the debounce monitor
mode         = passive        ; Monitoring mode: passive (default) or active
                              ; passive: process death monitoring (default: always)
                              ; active : heartbeat monitoring, i.e. request / response messaging

sw-patch/bin/pmon-sw-patch-controller-daemon.conf (new file, 19 lines)

[process]
process      = sw-patch-controller-daemon
pidfile      = /var/run/sw-patch-controller-daemon.pid
script       = /etc/init.d/sw-patch-controller-daemon
style        = lsb            ; ocf or lsb
severity     = major          ; Process failure severity
                              ;     critical : host is failed
                              ;     major    : host is degraded
                              ;     minor    : log is generated
restarts     = 3              ; Number of back to back unsuccessful restarts before severity assertion
interval     = 5              ; Number of seconds to wait between back-to-back unsuccessful restarts
debounce     = 20             ; Number of seconds the process needs to run before declaring
                              ; it as running O.K. after a restart.
                              ; Time after which back-to-back restart count is cleared.
startuptime  = 10             ; Seconds to wait after process start before starting the debounce monitor
mode         = passive        ; Monitoring mode: passive (default) or active
                              ; passive: process death monitoring (default: always)
                              ; active : heartbeat monitoring, i.e. request / response messaging

sw-patch/bin/policy.json (new file, 5 lines)

{
    "admin": "role:admin or role:administrator",
    "admin_api": "is_admin:True",
    "default": "rule:admin_api"
}

sw-patch/bin/query_patch (new executable file, 15 lines)

#!/usr/bin/env python
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


import sys

from cgcs_make_patch.make_patch_functions import query_patch

if __name__ == "__main__":
    sys.exit(query_patch())

sw-patch/bin/rpm-audit (new executable file, 183 lines)

#!/bin/bash
#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

if [[ $EUID -ne 0 ]]; then
    echo "This utility must be run as root." >&2
    exit 1
fi

function show_usage()
{
    cat <<EOF
Usage: [ --include-pyc ] [ --include-cfg ] --skip-multi [ pkg ... ]

This utility scans the installed RPMs to compare checksums of files.
By default, files flagged as config are skipped, as are python pyc files.

Optional arguments:
    --include-pyc : Include pyc files in check
    --include-cfg : Include config files in check
    --skip-links  : Skip symlink check
    --skip-multi  : Skip the search for files with multiple owners
    pkg           : Specify one or more packages to limit the scan
                    (implies --skip-multi)

EOF

    exit 1
}

declare INCLUDE_PYTHON_FILES="no"
declare INCLUDE_CFG_FILES="no"
declare CHECK_FOR_MULTI="yes"
declare CHECK_LINKS="yes"
declare TIS_ONLY="yes"

declare CHECK_RPM=

for arg in "$@"
do
    case $arg in
        -h|--help)
            show_usage
            ;;
        --include-pyc)
            INCLUDE_PYTHON_FILES="yes"
            ;;
        --include-cfg)
            INCLUDE_CFG_FILES="yes"
            ;;
        --skip-links)
            CHECK_LINKS="no"
            ;;
        --skip-multi)
            CHECK_FOR_MULTI="no"
            ;;
        --all-rpms)
            TIS_ONLY="no"
            ;;
        *)
            CHECK_RPM="$CHECK_RPM $arg"
            CHECK_FOR_MULTI="no"
            ;;
    esac
done

function rpm_list()
{
    if [ -n "$CHECK_RPM" ]
    then
        for pkg in $CHECK_RPM
        do
            echo $pkg
        done
    elif [ "$TIS_ONLY" = "yes" ]
    then
        rpm -qa | grep '\.tis\.' | sort
    else
        rpm -qa | sort
    fi
}

rpm_list | while read pkg
do
    # Get the --dump from the pkg
    rpm -q --queryformat "[%{FILENAMES}|%{FILEMD5S}|%{FILEFLAGS:fflags}|%{FILELINKTOS}\n]" $pkg | \
    while IFS='|' read pname psum pflags plinkto
    do
        if [[ $pname == "(contains" ]]
        then
            # (contains no files)
            continue
        fi

        if [[ $INCLUDE_CFG_FILES == "no" && $pflags =~ c ]]
        then
            # Skip file already flagged as config
            continue
        fi

        if [[ $INCLUDE_PYTHON_FILES == "no" && $pname =~ \.py[co]$ ]]
        then
            # Skip python .pyo or .pyc file
            continue
        fi

        # Directories and symlinks will have no checksum
        if [[ -z $psum ]]
        then
            if [[ -n $plinkto && $CHECK_LINKS == "yes" ]]
            then
                # Check the symlink pointer
                flinkto=$(readlink $pname)
                if [[ "$flinkto" != "$plinkto" ]]
                then
                    echo "Link Mismatch: $pname ($pkg)"
                fi
            fi
            continue
        fi

        # Does the file exist?
        if [ ! -e "$pname" ]
        then
            echo "Missing: $pname ($pkg)"
            continue
        fi

        # Has the file been replaced by a symlink? ie. update-alternatives
        if [ -L "$pname" ]
        then
            continue
        fi

        let -i sumlen=$(echo -n $psum | wc -c)
        if [ $sumlen = 64 ]
        then
            sumcmd=sha256sum
        else
            sumcmd=md5sum
        fi

        echo $psum $pname | $sumcmd --check --status
        if [ $? -ne 0 ]
        then
            echo "Mismatch: $pname ($pkg)"
        fi
    done
done


function check_for_multi_master()
{
    # Search for files owned by multiple packages
    prev=
    rpm_list | xargs rpm -q --queryformat "[%{FILENAMES}|%{=NAME}\n]" | sort | while IFS='|' read f p
    do
        if [ "$f" = "$prev" ]
        then
            echo $f
        fi
        prev=$f
    done | sort -u | while read f
    do
        if [ ! -d "$f" ]
        then
            echo $f
        fi
    done
}

if [ $CHECK_FOR_MULTI = "yes" ]
then
    echo
    echo
    echo "The following files belong to multiple packages:"
    echo
    check_for_multi_master
fi

sw-patch/bin/run-patch-scripts (new file, 60 lines)

#!/bin/bash
#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

. /etc/patching/patch-functions

declare SCRIPTS=$(find $PATCH_SCRIPTDIR -type f -executable | sort)
declare -i NUM_SCRIPTS=$(echo "$SCRIPTS" | wc -l)

if [ $NUM_SCRIPTS -eq 0 ]
then
    loginfo "No in-service patch scripts found."
    exit 0
fi

loginfo "Running $NUM_SCRIPTS in-service patch scripts"

declare SCRIPTLOG=/var/log/patching-insvc.log
cat <<EOF >>$SCRIPTLOG
############################################################
`date "+%FT%T.%3N"`: Running $NUM_SCRIPTS in-service patch scripts:

$SCRIPTS

############################################################
EOF

declare -i FAILURES=0
for cmd in $SCRIPTS
do
    cat <<EOF >>$SCRIPTLOG
############################################################
`date "+%FT%T.%3N"`: Running $cmd

EOF

    bash -x $cmd >>$SCRIPTLOG 2>&1
    rc=$?
    if [ $rc -ne $PATCH_STATUS_OK ]
    then
        let -i FAILURES++
    fi
    cat <<EOF >>$SCRIPTLOG
`date "+%FT%T.%3N"`: Completed running $cmd (rc=$rc)
############################################################

EOF
done

cat <<EOF >>$SCRIPTLOG

`date "+%FT%T.%3N"`: Completed running scripts with $FAILURES failures
############################################################
EOF

exit $FAILURES

sw-patch/bin/setup_patch_repo (new executable file, 182 lines)

#!/usr/bin/env python

"""
Copyright (c) 2018-2020 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import getopt
import os
import platform
import rpm
import shutil
import subprocess
import sys
import tempfile

import cgcs_patch.patch_functions as pf
import cgcs_patch.patch_verify as pv
import cgcs_patch.constants as constants

import logging
logging.getLogger('main_logger')
logging.basicConfig(level=logging.INFO)

# Override the pv.dev_certificate_marker so we can verify signatures off-box
cgcs_patch_bindir = os.path.dirname(os.path.abspath(sys.argv[0]))
dev_cert_path = os.path.abspath(os.path.join(cgcs_patch_bindir, '../../enable-dev-patch/enable-dev-patch/dev_certificate_enable.bin'))

pv.dev_certificate_marker = dev_cert_path

def usage():
    print "Usage: %s -o <repodir> <patch> ..." \
        % os.path.basename(sys.argv[0])
    exit(1)


def main():
    try:
        opts, remainder = getopt.getopt(sys.argv[1:],
                                        'o:',
                                        ['output='])
    except getopt.GetoptError:
        usage()

    output = None

    for opt, arg in opts:
        if opt == "--output" or opt == '-o':
            output = arg

    if output is None:
        usage()

    sw_version = os.environ['PLATFORM_RELEASE']

    allpatches = pf.PatchData()

    output = os.path.abspath(output)

    pkgdir = os.path.join(output, 'Packages')
    datadir = os.path.join(output, 'metadata')
    committed_dir = os.path.join(datadir, 'committed')

    if os.path.exists(output):
        # Check to see if the expected structure already exists,
        # maybe we're appending a patch.
        if not os.path.exists(committed_dir) or not os.path.exists(pkgdir):
            print "Packages or metadata dir missing from existing %s. Aborting..." % output
            exit(1)

        # Load the existing metadata
        allpatches.load_all_metadata(committed_dir, constants.COMMITTED)
    else:
        os.mkdir(output, 0o755)
        os.mkdir(datadir, 0o755)
        os.mkdir(committed_dir, 0o755)
        os.mkdir(pkgdir, 0o755)

    # Save the current directory, so we can chdir back after
    orig_wd = os.getcwd()

    tmpdir = None
    try:
        for p in remainder:
            fpath = os.path.abspath(p)

            # Create a temporary working directory
            tmpdir = tempfile.mkdtemp(prefix="patchrepo_")

            # Change to the tmpdir
            os.chdir(tmpdir)

            print "Parsing %s" % fpath
            pf.PatchFile.read_patch(fpath)

            thispatch = pf.PatchData()
            patch_id = thispatch.parse_metadata("metadata.xml", constants.COMMITTED)

            if patch_id in allpatches.metadata:
                print "Skipping %s as it's already in the repo" % patch_id
                # Change back to original working dir
                os.chdir(orig_wd)

                shutil.rmtree(tmpdir)
                tmpdir = None

                continue

            patch_sw_version = thispatch.query_line(patch_id, 'sw_version')
            if patch_sw_version != sw_version:
                raise Exception("%s is for release %s, not %s" % (patch_id, patch_sw_version, sw_version))

            # Move the metadata to the "committed" dir, and the rpms to the Packages dir
            shutil.move('metadata.xml', os.path.join(committed_dir, "%s-metadata.xml" % patch_id))
            for f in thispatch.query_line(patch_id, 'contents'):
                shutil.move(f, pkgdir)

            allpatches.add_patch(patch_id, thispatch)

            # Change back to original working dir
            os.chdir(orig_wd)

            shutil.rmtree(tmpdir)
            tmpdir = None
    except:
        if tmpdir is not None:
            # Change back to original working dir
            os.chdir(orig_wd)

            shutil.rmtree(tmpdir)
            tmpdir = None
        raise

    allpatches.gen_release_groups_xml(sw_version, output)

    # Purge unneeded RPMs
    keep = {}
    for patch_id in allpatches.metadata.keys():
        for rpmname in allpatches.contents[patch_id]:
            try:
                pkgname, arch, pkgver = pf.parse_rpm_filename(rpmname)
            except ValueError as e:
                raise e

            if pkgname not in keep:
                keep[pkgname] = { arch: pkgver }
                continue
            elif arch not in keep[pkgname]:
                keep[pkgname][arch] = pkgver
                continue

            # Compare versions
            keep_pkgver = keep[pkgname][arch]
            if pkgver > keep_pkgver:
                # Find the rpmname
                keep_rpmname = keep_pkgver.generate_rpm_filename(pkgname, arch)

                filename = os.path.join(pkgdir, keep_rpmname)
                if os.path.exists(filename):
                    os.remove(filename)

                # Keep the new pkgver
                keep[pkgname][arch] = pkgver
            else:
                filename = os.path.join(pkgdir, rpmname)
                if os.path.exists(filename):
                    os.remove(filename)

    # Create the repo metadata
    if os.path.exists('/usr/bin/createrepo_c'):
        createrepo = '/usr/bin/createrepo_c'
    else:
        createrepo = 'createrepo'

    os.chdir(output)
    subprocess.check_call([createrepo, '-g', 'comps.xml', '.'])


if __name__ == "__main__":
    sys.exit(main())

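A usage sketch for setup_patch_repo: the script reads PLATFORM_RELEASE from the environment and takes an output repo dir plus one or more patch files. The release value and file names below are illustrative:

    export PLATFORM_RELEASE=20.06
    setup_patch_repo -o /localdisk/patch-repo PATCH_0001.patch PATCH_0002.patch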
sw-patch/bin/sw-patch (new executable file, 16 lines)

#!/usr/bin/python

"""
Copyright (c) 2014 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import sys

from cgcs_patch.patch_client import main

if __name__ == "__main__":
    main()

sw-patch/bin/sw-patch-agent (new executable file, 16 lines)

#!/usr/bin/python

"""
Copyright (c) 2014 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import sys

from cgcs_patch.patch_agent import main

if __name__ == "__main__":
    main()

sw-patch/bin/sw-patch-agent-init.sh (new executable file, 94 lines)

#!/bin/sh
#
# Copyright (c) 2014-2015 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

# chkconfig: 345 26 30

### BEGIN INIT INFO
# Provides:          sw-patch-agent
# Required-Start:    $syslog
# Required-Stop:     $syslog
# Default-Start:     2 3 5
# Default-Stop:      0 1 6
# Short-Description: sw-patch-agent
# Description:       Provides the CGCS Patch Agent Daemon
### END INIT INFO

DESC="sw-patch-agent"
DAEMON="/usr/sbin/sw-patch-agent"
PIDFILE="/var/run/sw-patch-agent.pid"
PATCH_INSTALLING_FILE="/var/run/patch_installing"

start()
{
    if [ -e $PIDFILE ]; then
        PIDDIR=/proc/$(cat $PIDFILE)
        if [ -d ${PIDDIR} ]; then
            echo "$DESC already running."
            exit 1
        else
            echo "Removing stale PID file $PIDFILE"
            rm -f $PIDFILE
        fi
    fi

    echo -n "Starting $DESC..."

    start-stop-daemon --start --quiet --background \
        --pidfile ${PIDFILE} --make-pidfile --exec ${DAEMON}

    if [ $? -eq 0 ]; then
        echo "done."
    else
        echo "failed."
    fi
}

stop()
{
    if [ -f $PATCH_INSTALLING_FILE ]; then
        echo "Patches are installing. Waiting for install to complete."
        while [ -f $PATCH_INSTALLING_FILE ]; do
            # Verify the agent is still running
            pid=$(cat $PATCH_INSTALLING_FILE)
            cat /proc/$pid/cmdline 2>/dev/null | grep -q $DAEMON
            if [ $? -ne 0 ]; then
                echo "Patch agent not running."
                break
            fi
            sleep 1
        done
        echo "Continuing with shutdown."
    fi

    echo -n "Stopping $DESC..."
    start-stop-daemon --stop --quiet --pidfile $PIDFILE
    if [ $? -eq 0 ]; then
        echo "done."
    else
        echo "failed."
    fi
    rm -f $PIDFILE
}

case "$1" in
    start)
        start
        ;;
    stop)
        stop
        ;;
    restart|force-reload)
        stop
        start
        ;;
    *)
        echo "Usage: $0 {start|stop|force-reload|restart}"
        exit 1
        ;;
esac

exit 0

sw-patch/bin/sw-patch-agent-restart (new file, 20 lines)

#!/bin/bash
#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

. /etc/patching/patch-functions

#
# Triggering a restart of the patching daemons is done by
# creating a flag file and letting the daemon handle the restart.
#
loginfo "Requesting restart of patch-agent"

restart_patch_agent_flag="/run/patching/.restart.patch-agent"
touch $restart_patch_agent_flag

exit 0

sw-patch/bin/sw-patch-agent.service (new file, 16 lines)

[Unit]
Description=StarlingX Patching Agent
After=syslog.target network-online.target sw-patch.service
Before=pmon.service

[Service]
Type=forking
User=root
ExecStart=/etc/init.d/sw-patch-agent start
ExecStop=/etc/init.d/sw-patch-agent stop
ExecReload=/etc/init.d/sw-patch-agent restart
PIDFile=/var/run/sw-patch-agent.pid

[Install]
WantedBy=multi-user.target

sw-patch/bin/sw-patch-controller-daemon (new executable file, 16 lines)

#!/usr/bin/python

"""
Copyright (c) 2014 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import sys

from cgcs_patch.patch_controller import main

if __name__ == "__main__":
    main()

sw-patch/bin/sw-patch-controller-daemon-init.sh (new executable file, 78 lines)

#!/bin/sh
#
# Copyright (c) 2014-2015 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

# chkconfig: 345 25 30

### BEGIN INIT INFO
# Provides:          sw-patch-controller-daemon
# Required-Start:    $syslog
# Required-Stop:     $syslog
# Default-Start:     2 3 5
# Default-Stop:      0 1 6
# Short-Description: sw-patch-controller-daemon
# Description:       Provides the CGCS Patch Controller Daemon
### END INIT INFO

DESC="sw-patch-controller-daemon"
DAEMON="/usr/sbin/sw-patch-controller-daemon"
PIDFILE="/var/run/sw-patch-controller-daemon.pid"

start()
{
    if [ -e $PIDFILE ]; then
        PIDDIR=/proc/$(cat $PIDFILE)
        if [ -d ${PIDDIR} ]; then
            echo "$DESC already running."
            exit 1
        else
            echo "Removing stale PID file $PIDFILE"
            rm -f $PIDFILE
        fi
    fi

    echo -n "Starting $DESC..."

    start-stop-daemon --start --quiet --background \
        --pidfile ${PIDFILE} --make-pidfile --exec ${DAEMON}

    if [ $? -eq 0 ]; then
        echo "done."
    else
        echo "failed."
    fi
}

stop()
{
    echo -n "Stopping $DESC..."
    start-stop-daemon --stop --quiet --pidfile $PIDFILE
    if [ $? -eq 0 ]; then
        echo "done."
    else
        echo "failed."
    fi
    rm -f $PIDFILE
}

case "$1" in
    start)
        start
        ;;
    stop)
        stop
        ;;
    restart|force-reload)
        stop
        start
        ;;
    *)
        echo "Usage: $0 {start|stop|force-reload|restart}"
        exit 1
        ;;
esac

exit 0

sw-patch/bin/sw-patch-controller-daemon-restart (new file, 20 lines)

#!/bin/bash
#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

. /etc/patching/patch-functions

#
# Triggering a restart of the patching daemons is done by
# creating a flag file and letting the daemon handle the restart.
#
loginfo "Requesting restart of patch-controller"

restart_patch_controller_flag="/run/patching/.restart.patch-controller"
touch $restart_patch_controller_flag

exit 0

sw-patch/bin/sw-patch-controller-daemon.service (new file, 16 lines)

[Unit]
Description=StarlingX Patching Controller Daemon
After=syslog.target network-online.target sw-patch.service sw-patch-controller.service
Before=pmon.service

[Service]
Type=forking
User=root
ExecStart=/etc/init.d/sw-patch-controller-daemon start
ExecStop=/etc/init.d/sw-patch-controller-daemon stop
ExecReload=/etc/init.d/sw-patch-controller-daemon restart
PIDFile=/var/run/sw-patch-controller-daemon.pid

[Install]
WantedBy=multi-user.target

sw-patch/bin/sw-patch-controller-init.sh (new file, 106 lines)

#!/bin/bash
#
# Copyright (c) 2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# StarlingX Patching Controller setup
# chkconfig: 345 20 24
# description: CGCS Patching Controller init script

### BEGIN INIT INFO
# Provides:          sw-patch-controller
# Required-Start:    $syslog
# Required-Stop:     $syslog
# Default-Start:     2 3 5
# Default-Stop:      0 1 6
# Short-Description: sw-patch-controller
# Description:       Provides the StarlingX Patch Controller Daemon
### END INIT INFO

. /usr/bin/tsconfig

NAME=$(basename $0)

REPO_ID=updates
REPO_ROOT=/var/www/pages/${REPO_ID}
REPO_DIR=${REPO_ROOT}/rel-${SW_VERSION}
GROUPS_FILE=$REPO_DIR/comps.xml
PATCHING_DIR=/opt/patching

logfile=/var/log/patching.log

function LOG {
    logger "$NAME: $*"
    echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}

function LOG_TO_FILE {
    echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}

function create_groups {
    if [ -f $GROUPS_FILE ]; then
        return 0
    fi

    cat >$GROUPS_FILE <<EOF
<comps>
</comps>

EOF
}

function do_setup {
    # Does the repo exist?
    if [ ! -d $REPO_DIR ]; then
        LOG "Creating repo"
        mkdir -p $REPO_DIR

        # Setup the groups file
        create_groups

        createrepo -g $GROUPS_FILE $REPO_DIR >> $logfile 2>&1
    fi

    if [ ! -d $PATCHING_DIR ]; then
        LOG "Creating $PATCHING_DIR"
        mkdir -p $PATCHING_DIR
    fi

    # If we can ping the active controller, sync the repos
    LOG_TO_FILE "ping -c 1 -w 1 controller"
    ping -c 1 -w 1 controller >> $logfile 2>&1 || ping6 -c 1 -w 1 controller >> $logfile 2>&1
    if [ $? -ne 0 ]; then
        LOG "Cannot ping controller. Nothing to do"
        return 0
    fi

    # Sync the patching dir
    LOG_TO_FILE "rsync -acv --delete rsync://controller/patching/ ${PATCHING_DIR}/"
    rsync -acv --delete rsync://controller/patching/ ${PATCHING_DIR}/ >> $logfile 2>&1

    # Sync the patching dir
    LOG_TO_FILE "rsync -acv --delete rsync://controller/repo/ ${REPO_ROOT}/"
    rsync -acv --delete rsync://controller/repo/ ${REPO_ROOT}/ >> $logfile 2>&1
}

case "$1" in
    start)
        do_setup
        ;;
    status)
        ;;
    stop)
        # Nothing to do here
        ;;
    restart)
        do_setup
        ;;
    *)
        echo "Usage: $0 {status|start|stop|restart}"
        exit 1
esac

exit 0

sw-patch/bin/sw-patch-controller.service (new file, 14 lines)

[Unit]
Description=StarlingX Patching Controller
After=syslog.service network-online.target sw-patch.service
Before=sw-patch-agent.service sw-patch-controller-daemon.service

[Service]
Type=oneshot
User=root
ExecStart=/etc/init.d/sw-patch-controller start
RemainAfterExit=yes

[Install]
WantedBy=multi-user.target

sw-patch/bin/sw-patch-init.sh (new file, 178 lines)

#!/bin/bash
#
# Copyright (c) 2014-2020 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
# StarlingX Patching
# chkconfig: 345 20 23
# description: StarlingX Patching init script

### BEGIN INIT INFO
# Provides:          sw-patch
# Required-Start:    $syslog
# Required-Stop:     $syslog
# Default-Start:     2 3 5
# Default-Stop:      0 1 6
# Short-Description: sw-patch
# Description:       Provides the StarlingX Patching
### END INIT INFO

NAME=$(basename $0)

. /usr/bin/tsconfig
. /etc/platform/platform.conf

logfile=/var/log/patching.log
patch_failed_file=/var/run/patch_install_failed
patched_during_init=/etc/patching/.patched_during_init

function LOG_TO_FILE {
    echo "`date "+%FT%T.%3N"`: $NAME: $*" >> $logfile
}

function check_for_rr_patch {
    if [ -f /var/run/node_is_patched_rr ]; then
        if [ ! -f ${patched_during_init} ]; then
            echo
            echo "Node has been patched and requires an immediate reboot."
            echo
            LOG_TO_FILE "Node has been patched, with reboot-required flag set. Rebooting"
            touch ${patched_during_init}
            /sbin/reboot
        else
            echo
            echo "Node has been patched during init a second consecutive time. Skipping reboot due to possible error"
            echo
            LOG_TO_FILE "Node has been patched during init a second consecutive time. Skipping reboot due to possible error"
            touch ${patch_failed_file}
            rm -f ${patched_during_init}
            exit 1
        fi
    else
        rm -f ${patched_during_init}
    fi
}

function check_install_uuid {
    # Check whether our installed load matches the active controller
    CONTROLLER_UUID=`curl -sf http://controller:${http_port}/feed/rel-${SW_VERSION}/install_uuid`
    if [ $? -ne 0 ]; then
        if [ "$HOSTNAME" = "controller-1" ]; then
            # If we're on controller-1, controller-0 may not have the install_uuid
            # matching this release, if we're in an upgrade. If the file doesn't exist,
            # bypass this check
            return 0
        fi

        LOG_TO_FILE "Unable to retrieve installation uuid from active controller"
        echo "Unable to retrieve installation uuid from active controller"
        return 1
    fi

    if [ "$INSTALL_UUID" != "$CONTROLLER_UUID" ]; then
        LOG_TO_FILE "This node is running a different load than the active controller and must be reinstalled"
        echo "This node is running a different load than the active controller and must be reinstalled"
        return 1
    fi

    return 0
}

# Check for installation failure
if [ -f /etc/platform/installation_failed ] ; then
    LOG_TO_FILE "/etc/platform/installation_failed flag is set. Aborting."
    echo "$(basename $0): Detected installation failure. Aborting."
    exit 1
fi

# Clean up the RPM DB
if [ ! -f /var/run/.rpmdb_cleaned ]; then
    LOG_TO_FILE "Cleaning RPM DB"
    rm -f /var/lib/rpm/__db*
    touch /var/run/.rpmdb_cleaned
fi

# For AIO-SX, abort if config is not yet applied and this is running in init
if [ "${system_mode}" = "simplex" -a ! -f ${INITIAL_CONTROLLER_CONFIG_COMPLETE} -a "$1" = "start" ]; then
    LOG_TO_FILE "Config is not yet applied. Skipping init patching"
    exit 0
fi

# If the management interface is bonded, it may take some time
# before communications can be properly setup.
# Allow up to $DELAY_SEC seconds to reach controller.
DELAY_SEC=120
START=`date +%s`
FOUND=0
while [ $(date +%s) -lt $(( ${START} + ${DELAY_SEC} )) ]; do
    ping -c 1 controller > /dev/null 2>&1 || ping6 -c 1 controller > /dev/null 2>&1
    if [ $? -eq 0 ]; then
        FOUND=1
        break
    fi
    sleep 1
done

if [ ${FOUND} -eq 0 ]; then
    # 'controller' is not available, just exit
    LOG_TO_FILE "Unable to contact active controller (controller). Boot will continue."
    exit 1
fi

RC=0
case "$1" in
    start)
        if [ "${system_mode}" = "simplex" ]; then
            # On a simplex CPE, we need to launch the http server first,
            # before we can do the patch installation
            LOG_TO_FILE "***** Launching lighttpd *****"
            /etc/init.d/lighttpd start

            LOG_TO_FILE "***** Starting patch operation *****"
            /usr/sbin/sw-patch-agent --install 2>>$logfile
            if [ -f ${patch_failed_file} ]; then
                RC=1
                LOG_TO_FILE "***** Patch operation failed *****"
            fi
            LOG_TO_FILE "***** Finished patch operation *****"

            LOG_TO_FILE "***** Shutting down lighttpd *****"
            /etc/init.d/lighttpd stop
        else
            check_install_uuid
            if [ $? -ne 0 ]; then
                # The INSTALL_UUID doesn't match the active controller, so exit
                exit 1
            fi

            LOG_TO_FILE "***** Starting patch operation *****"
            /usr/sbin/sw-patch-agent --install 2>>$logfile
            if [ -f ${patch_failed_file} ]; then
                RC=1
                LOG_TO_FILE "***** Patch operation failed *****"
            fi
            LOG_TO_FILE "***** Finished patch operation *****"
        fi

        check_for_rr_patch
        ;;
    stop)
        # Nothing to do here
        ;;
    restart)
        LOG_TO_FILE "***** Starting patch operation *****"
        /usr/sbin/sw-patch-agent --install 2>>$logfile
        if [ -f ${patch_failed_file} ]; then
            RC=1
            LOG_TO_FILE "***** Patch operation failed *****"
        fi
        LOG_TO_FILE "***** Finished patch operation *****"
        ;;
    *)
        echo "Usage: $0 {start|stop|restart}"
        exit 1
esac

exit $RC

sw-patch/bin/sw-patch.completion (new file, 148 lines)

#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

#
# This file provides bash-completion functionality for the sw-patch CLI
#

function _swpatch()
{
    COMPREPLY=()
    local cur="${COMP_WORDS[COMP_CWORD]}"
    local prev="${COMP_WORDS[COMP_CWORD-1]}"
    local subcommand=${COMP_WORDS[1]}

    #
    # The available sw-patch subcommands
    #
    local subcommands="
        apply
        commit
        delete
        query
        query-dependencies
        query-hosts
        remove
        show
        upload
        upload-dir
        what-requires
        drop-host
        is-applied
        is-available
        report-app-dependencies
        query-app-dependencies
    "
    if [ -f /etc/platform/.initial_config_complete ]; then
        # Post-config, so the host-install commands are accessible
        subcommands="${subcommands} host-install host-install-async"
    else
        # Pre-config, so the install-local command is accessible
        subcommands="${subcommands} install-local"
    fi

    # Appends the '/' when completing dir names
    set mark-directories on

    if [ $COMP_CWORD -gt 1 ]; then
        #
        # Complete the arguments to the subcommands.
        #
        case "$subcommand" in
            apply|delete|show|what-requires|is-applied|is-available)
                # Query the list of known patches
                local patches=$(sw-patch completion patches 2>/dev/null)
                COMPREPLY=( $(compgen -W "${patches}" -- ${cur}) )
                return 0
                ;;
            remove)
                # Query the list of known patches
                local patches=$(sw-patch completion patches 2>/dev/null)
                COMPREPLY=( $(compgen -W "--skipappcheck ${patches}" -- ${cur}) )
                return 0
                ;;
            host-install|host-install-async|drop-host)
                if [ "${prev}" = "${subcommand}" -o "${prev}" = "--force" ]; then
                    # Query the list of known hosts
                    local names=$(sw-patch completion hosts 2>/dev/null)
                    COMPREPLY=( $(compgen -W "${names}" -- ${cur}) )
                else
                    # Only one host can be specified, so no more completion
                    COMPREPLY=( $(compgen -- ${cur}) )
                fi
                return 0
                ;;
            upload)
                # Allow dirs and files with .patch extension for completion
                COMPREPLY=( $(compgen -f -o plusdirs -X '!*.patch' -- ${cur}) )
                return 0
                ;;
            upload-dir)
                # Allow dirs only for completion
                COMPREPLY=( $(compgen -d -- ${cur}) )
                return 0
                ;;
            query)
                if [ "${prev}" = "--release" ]; then
                    # If --release has been specified, provide installed releases for completion
                    local releases=$(/bin/ls -d /var/www/pages/feed/rel-* 2>/dev/null | sed 's#/var/www/pages/feed/rel-##')
                    COMPREPLY=( $(compgen -W "${releases}" -- ${cur}) )
                else
                    # --release is only completion option for query
                    COMPREPLY=( $(compgen -W "--release" -- ${cur}) )
                fi
                return 0
                ;;
            query-hosts|install-local)
                # These subcommands have no options/arguments
                COMPREPLY=( $(compgen -- ${cur}) )
                return 0
                ;;
            query-dependencies)
                # Query the list of known patches
                local patches=$(sw-patch completion patches 2>/dev/null)
                COMPREPLY=( $(compgen -W "--recursive ${patches}" -- ${cur}) )
                return 0
                ;;
            commit)
                if [ "${prev}" = "--release" ]; then
                    # If --release has been specified, provide installed releases for completion
                    local releases=$(/bin/ls -d /var/www/pages/feed/rel-* 2>/dev/null | sed 's#/var/www/pages/feed/rel-##')
                    COMPREPLY=( $(compgen -W "${releases}" -- ${cur}) )
                else
                    # Query the list of known patches
                    local patches=$(sw-patch completion patches 2>/dev/null)
                    COMPREPLY=( $(compgen -W "--all --dry-run --release ${patches}" -- ${cur}) )
                fi
                return 0
                ;;
            report-app-dependencies)
                if [ "${prev}" = "${subcommand}" ]; then
                    COMPREPLY=( $(compgen -W "--app" -- ${cur}) )
                elif [ "${prev}" = "--app" ]; then
                    COMPREPLY=
                else
                    local patches=$(sw-patch completion patches 2>/dev/null)
                    COMPREPLY=( $(compgen -W "${patches}" -- ${cur}) )
                fi
                return 0
                ;;
            query-app-dependencies)
                return 0
                ;;
            *)
                ;;
        esac
    fi

    # Provide subcommands for completion
    COMPREPLY=($(compgen -W "${subcommands}" -- ${cur}))
    return 0
}

# Bind the above function to the sw-patch CLI
complete -F _swpatch -o filenames sw-patch

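To pick up the completion in an interactive shell, the file can be sourced directly; installing under /etc/bash_completion.d is the conventional system-wide route, though this commit does not itself install it there:

    . sw-patch/bin/sw-patch.completion
    sw-patch <TAB><TAB>    # offers apply, commit, delete, query, ...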
sw-patch/bin/sw-patch.service (new file, 16 lines)

[Unit]
Description=StarlingX Patching
After=syslog.target network-online.target
Before=sw-patch-agent.service

[Service]
Type=oneshot
User=root
ExecStart=/etc/init.d/sw-patch start
RemainAfterExit=yes
StandardOutput=syslog+console
StandardError=syslog+console

[Install]
WantedBy=multi-user.target

sw-patch/bin/upgrade-start-pkg-extract (new file, 137 lines)

#!/bin/bash
#
# Copyright (c) 2018-2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

function show_usage()
{
    cat >&2 <<EOF
$(basename $0): -r <release>

This tool will extract required packages to support upgrade-start

Options:
    -r <release> : Release ID for target release.

EOF
    exit 1
}

. /etc/build.info
if [ -z "${SW_VERSION}" ]; then
    logger -t $0 "Unable to identify running release ID"
    exit 1
fi

declare TGT_RELEASE=

while getopts "r:h" opt; do
    case $opt in
        r)
            TGT_RELEASE=$OPTARG
            ;;
        h)
            show_usage
            ;;
        *)
            logger -t $0 "Unsupported option"
            show_usage
            ;;
    esac
done

if [ -z "${TGT_RELEASE}" ]; then
    logger -t $0 "You must specify the target release."
    exit 1
fi

if [ "${TGT_RELEASE}" = "${SW_VERSION}" ]; then
    logger -t $0 "Target release cannot be running release."
    exit 1
fi

declare TGT_BASE_REPO=/var/www/pages/feed/rel-${TGT_RELEASE}
declare TGT_PATCHES_REPO=/var/www/pages/updates/rel-${TGT_RELEASE}

if [ ! -d ${TGT_BASE_REPO} ]; then
    logger -t $0 "Target release ${TGT_RELEASE} is not installed"
    exit 1
fi

declare TGT_PATCHES_REPO_OPT=""
if [ -d ${TGT_PATCHES_REPO} ]; then
    TGT_PATCHES_REPO_OPT="--repofrompath updates,${TGT_PATCHES_REPO}"
fi

declare WORKDIR=

function cleanup() {
    if [ -n "${WORKDIR}" -a -d "${WORKDIR}" ]; then
        rm -rf ${WORKDIR}
    fi
}

trap cleanup EXIT

function extract_pkg() {
    local pkgname=$1

    ORIG_PWD=$PWD
    cd $WORKDIR

    # Find the RPM
    local pkgfile=$(dnf repoquery --disablerepo=* --repofrompath base,${TGT_BASE_REPO} ${TGT_PATCHES_REPO_OPT} --latest-limit=1 --location -q ${pkgname})
    if [ -z "${pkgfile}" ]; then
        logger -t $0 "Could not find ${pkgname}"
        exit 1
    fi

    # Chop off the file: from the start of the file location
    local rpmfile=${pkgfile/file://}

    rpm2cpio ${rpmfile} | cpio -idm
    if [ $? -ne 0 ]; then
        logger -t $0 "Failed to extract $pkgname files from ${pkgfile/file://}"
        exit 1
    fi

    cd ${ORIG_PWD}
}

# Extract files from pxe-network-installer
WORKDIR=$(mktemp -d --tmpdir=/scratch pkgextract_XXXX)
if [ -z "${WORKDIR}" -o ! -d "${WORKDIR}" ]; then
    logger -t $0 "Failed to create workdir"
    exit 1
fi
# Clean dnf cache in case a previous load had different package versions
dnf clean expire-cache
extract_pkg pxe-network-installer
rsync -ac ${WORKDIR}/usr/ /usr/ &&
rsync -ac ${WORKDIR}/var/pxeboot/rel-${TGT_RELEASE}/ /var/pxeboot/rel-${TGT_RELEASE}/ &&
rsync -c ${WORKDIR}/var/pxeboot/pxelinux.cfg.files/*-${TGT_RELEASE} /var/pxeboot/pxelinux.cfg.files/ &&
rsync -ac ${WORKDIR}/var/www/pages/feed/rel-${TGT_RELEASE}/ /var/www/pages/feed/rel-${TGT_RELEASE}/
if [ $? -ne 0 ]; then
    logger -t $0 "rsync command failed, extracting pxe-network-installer"
    exit 1
fi
rm -rf ${WORKDIR}

# Extract files from platform-kickstarts
WORKDIR=$(mktemp -d --tmpdir=/scratch pkgextract_XXXX)
if [ -z "${WORKDIR}" -o ! -d "${WORKDIR}" ]; then
    logger -t $0 "Failed to create workdir"
    exit 1
fi
extract_pkg platform-kickstarts
rsync -ac ${WORKDIR}/var/www/pages/feed/rel-${TGT_RELEASE}/ /var/www/pages/feed/rel-${TGT_RELEASE}/
if [ $? -ne 0 ]; then
    logger -t $0 "rsync command failed, extracting platform-kickstarts"
    exit 1
fi
rm -rf ${WORKDIR}

exit 0

7
sw-patch/cgcs-patch/.coveragerc
Normal file
@ -0,0 +1,7 @@
[run]
branch = True
source = cgcs_patch
omit = cgcs_patch/tests/*

[report]
ignore_errors = True
2
sw-patch/cgcs-patch/.stestr.conf
Normal file
@ -0,0 +1,2 @@
[DEFAULT]
test_path=cgcs_patch/tests
202
sw-patch/cgcs-patch/LICENSE
Normal file
@ -0,0 +1,202 @@

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!) The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright [yyyy] [name of copyright owner]

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
6
sw-patch/cgcs-patch/cgcs_patch/__init__.py
Normal file
@ -0,0 +1,6 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""
30
sw-patch/cgcs-patch/cgcs_patch/api/__init__.py
Normal file
@ -0,0 +1,30 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

from oslo_config import cfg


API_SERVICE_OPTS = [
    cfg.StrOpt('api_bind_ip',
               default='127.0.0.1',
               help='IP for the Patching controller API server to bind to',
               ),
    cfg.IntOpt('api_port',
               default=5487,
               help='The port for the Patching controller API server',
               ),
    cfg.IntOpt('api_limit_max',
               default=1000,
               help='the maximum number of items returned in a single '
                    'response from a collection resource'),
]

CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',
                         title='Options for the Patching controller api service')
CONF.register_group(opt_group)
CONF.register_opts(API_SERVICE_OPTS)
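
For readers unfamiliar with oslo.config, a minimal self-contained sketch of the registration pattern used above, with the option names and defaults copied from this module (the standalone ConfigOpts instance is illustrative; the module itself registers against the global cfg.CONF):

    from oslo_config import cfg

    conf = cfg.ConfigOpts()
    conf.register_opts([
        cfg.StrOpt('api_bind_ip', default='127.0.0.1'),
        cfg.IntOpt('api_port', default=5487),
    ])
    conf(args=[])  # parse an empty command line, keeping the defaults

    print(conf.api_bind_ip, conf.api_port)  # -> 127.0.0.1 5487
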
43
sw-patch/cgcs-patch/cgcs_patch/api/app.py
Normal file
@ -0,0 +1,43 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import pecan

from cgcs_patch.api import config


def get_pecan_config():
    # Set up the pecan configuration
    filename = config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(filename)


def setup_app(pecan_config=None):
    if not pecan_config:
        pecan_config = get_pecan_config()

    pecan.configuration.set_config(dict(pecan_config), overwrite=True)

    app = pecan.make_app(
        pecan_config.app.root,
        static_root=pecan_config.app.static_root,
        template_path=pecan_config.app.template_path,
        debug=False,
        force_canonical=getattr(pecan_config.app, 'force_canonical', True),
        guess_content_type_from_ext=False,  # Avoid mime-type lookup
    )

    return app


class VersionSelectorApplication(object):
    def __init__(self):
        pc = get_pecan_config()
        self.v1 = setup_app(pecan_config=pc)

    def __call__(self, environ, start_response):
        return self.v1(environ, start_response)
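
A hypothetical sketch of serving this WSGI application with the standard library, useful only for local experimentation; in the product the application is hosted by the patching daemons, and the port below simply mirrors the api_port default registered in cgcs_patch.api:

    from wsgiref.simple_server import make_server

    from cgcs_patch.api.app import VersionSelectorApplication

    # 5487 matches the api_port default; bind to loopback as the module does
    with make_server('127.0.0.1', 5487, VersionSelectorApplication()) as httpd:
        httpd.serve_forever()
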
23
sw-patch/cgcs-patch/cgcs_patch/api/config.py
Normal file
@ -0,0 +1,23 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

# Server Specific Configurations
server = {
    'port': '5487',
    'host': '127.0.0.1'
}

# Pecan Application Configurations
app = {
    'root': 'cgcs_patch.api.controllers.root.RootController',
    'modules': ['cgcs_patch.authapi'],
    'static_root': '%(confdir)s/public',
    'template_path': '%(confdir)s/../templates',
    'debug': False,
    'enable_acl': True,
    'acl_public_routes': [],
}
@ -0,0 +1,6 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""
293
sw-patch/cgcs-patch/cgcs_patch/api/controllers/root.py
Normal file
@ -0,0 +1,293 @@
"""
Copyright (c) 2014-2019 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import os
from pecan import expose
from pecan import request
import cgi
import glob

from cgcs_patch.exceptions import PatchError
from cgcs_patch.patch_controller import pc

from cgcs_patch.patch_functions import LOG


class PatchAPIController(object):

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def index(self):
        return self.query()

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def query(self, **kwargs):
        try:
            pd = pc.patch_query_cached(**kwargs)
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        return dict(pd=pd)

    @expose('json')
    @expose('show.xml', content_type='application/xml')
    def show(self, *args):
        try:
            result = pc.patch_query_specific_cached(list(args))
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        return result

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def apply(self, *args, **kwargs):
        if pc.any_patch_host_installing():
            return dict(error="Rejected: One or more nodes are installing patches.")

        try:
            result = pc.patch_apply_api(list(args), **kwargs)
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        pc.patch_sync()

        return result

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def remove(self, *args, **kwargs):
        if pc.any_patch_host_installing():
            return dict(error="Rejected: One or more nodes are installing patches.")

        try:
            result = pc.patch_remove_api(list(args), **kwargs)
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        pc.patch_sync()

        return result

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def delete(self, *args):
        try:
            result = pc.patch_delete_api(list(args))
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        pc.patch_sync()

        return result

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def upload(self):
        assert isinstance(request.POST['file'], cgi.FieldStorage)
        fileitem = request.POST['file']

        if not fileitem.filename:
            return dict(error="Error: No file uploaded")

        fn = '/scratch/' + os.path.basename(fileitem.filename)

        if hasattr(fileitem.file, 'fileno'):
            # This technique cannot copy a very large file. It
            # requires a lot of memory as all data from the
            # source file is read into memory then written to
            # the destination file in one chunk:
            # open(fn, 'wb').write(fileitem.file.read())

            # Copying file by chunks using OS system calls
            # requires much less memory. A larger chunk
            # size can be used to improve the copy speed;
            # currently 64K chunk size is selected
            dst = os.open(fn, os.O_WRONLY | os.O_CREAT)
            src = fileitem.file.fileno()
            size = 64 * 1024
            n = size
            while n >= size:
                s = os.read(src, size)
                n = os.write(dst, s)
            os.close(dst)
        else:
            open(fn, 'wb').write(fileitem.file.read())

        try:
            result = pc.patch_import_api([fn])
        except PatchError as e:
            os.remove(fn)
            return dict(error=str(e))

        os.remove(fn)

        pc.patch_sync()

        return result

    @expose('json')
    def upload_dir(self, **kwargs):
        files = []
        for path in kwargs.values():
            LOG.info("upload-dir: Retrieving patches from %s", path)
            for f in glob.glob(path + '/*.patch'):
                if os.path.isfile(f):
                    files.append(f)

        if len(files) == 0:
            return dict(error="No patches found")

        try:
            result = pc.patch_import_api(sorted(files))
        except PatchError as e:
            return dict(error=str(e))

        pc.patch_sync()

        return result

    @expose('json')
    def init_release(self, *args):
        if len(list(args)) == 0:
            return dict(error="Release must be specified")

        try:
            result = pc.patch_init_release_api(list(args)[0])
        except PatchError as e:
            return dict(error=str(e))

        pc.patch_sync()

        return result

    @expose('json')
    def del_release(self, *args):
        if len(list(args)) == 0:
            return dict(error="Release must be specified")

        try:
            result = pc.patch_del_release_api(list(args)[0])
        except PatchError as e:
            return dict(error=str(e))

        pc.patch_sync()

        return result

    @expose('json')
    @expose('query_hosts.xml', content_type='application/xml')
    def query_hosts(self, *args):  # pylint: disable=unused-argument
        return dict(data=pc.query_host_cache())

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def what_requires(self, *args):
        try:
            result = pc.patch_query_what_requires(list(args))
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        return result

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def host_install(self, *args):  # pylint: disable=unused-argument
        return dict(error="Deprecated: Use host_install_async")

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def host_install_async(self, *args):
        if len(list(args)) == 0:
            return dict(error="Host must be specified for install")
        force = False
        if len(list(args)) > 1 and 'force' in list(args)[1:]:
            force = True

        try:
            result = pc.patch_host_install(list(args)[0], force, async_req=True)
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        return result

    @expose('json')
    @expose('query.xml', content_type='application/xml')
    def drop_host(self, *args):
        if len(list(args)) == 0:
            return dict(error="Host must be specified")

        try:
            result = pc.drop_host(list(args)[0])
        except PatchError as e:
            return dict(error="Error: %s" % str(e))

        return result

    @expose('json')
    def query_dependencies(self, *args, **kwargs):
        try:
            result = pc.patch_query_dependencies(list(args), **kwargs)
        except PatchError as e:
            return dict(error=str(e))

        return result

    @expose('json')
    def commit(self, *args):
        try:
            result = pc.patch_commit(list(args))
        except PatchError as e:
            return dict(error=str(e))

        pc.patch_sync()

        return result

    @expose('json')
    def commit_dry_run(self, *args):
        try:
            result = pc.patch_commit(list(args), dry_run=True)
        except PatchError as e:
            return dict(error=str(e))

        return result

    @expose('json')
    def is_applied(self, *args):
        return pc.is_applied(list(args))

    @expose('json')
    def is_available(self, *args):
        return pc.is_available(list(args))

    @expose('json')
    def report_app_dependencies(self, *args, **kwargs):
        try:
            result = pc.report_app_dependencies(list(args), **kwargs)
        except PatchError as e:
            return dict(status=500, error=str(e))

        pc.patch_sync()

        return result

    @expose('json')
    def query_app_dependencies(self):
        return pc.query_app_dependencies()


class RootController(object):

    @expose()
    @expose('json')
    def index(self):
        return "Titanium Cloud Patching API, Available versions: /v1"

    patch = PatchAPIController()
    v1 = PatchAPIController()
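
A hypothetical client sketch against the controller API defined above, assuming a controller is listening on the default 127.0.0.1:5487; the /patch/query route resolves to PatchAPIController.query() via the `patch` attribute on RootController:

    import json
    import urllib.request

    # Fetch the cached patch state; a successful response is {'pd': {...}}
    with urllib.request.urlopen('http://127.0.0.1:5487/patch/query') as resp:
        print(json.dumps(json.load(resp), indent=2))
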
24
sw-patch/cgcs-patch/cgcs_patch/app.py
Normal file
@ -0,0 +1,24 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

from pecan import make_app


def setup_app(config):

    return make_app(
        config.app.root,
        static_root=config.app.static_root,
        template_path=config.app.template_path,
        logging=getattr(config, 'logging', {}),
        debug=getattr(config.app, 'debug', False),
        force_canonical=getattr(config.app, 'force_canonical', True),
        guess_content_type_from_ext=getattr(
            config.app,
            'guess_content_type_from_ext',
            True),
    )
25
sw-patch/cgcs-patch/cgcs_patch/authapi/__init__.py
Executable file
@ -0,0 +1,25 @@
# Copyright (c) 2013-2017 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

from oslo_config import cfg

API_SERVICE_OPTS = [
    cfg.StrOpt('auth_api_bind_ip',
               default=None,
               help='IP for the authenticated Patching API server to bind to'),
    cfg.IntOpt('auth_api_port',
               default=5491,
               help='The port for the authenticated Patching API server'),
    cfg.IntOpt('api_limit_max',
               default=1000,
               help='the maximum number of items returned in a single '
                    'response from a collection resource')
]

CONF = cfg.CONF
opt_group = cfg.OptGroup(name='api',
                         title='Options for the patch-api service')
CONF.register_group(opt_group)
CONF.register_opts(API_SERVICE_OPTS)
30
sw-patch/cgcs-patch/cgcs_patch/authapi/acl.py
Executable file
@ -0,0 +1,30 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

from cgcs_patch.authapi import auth_token

OPT_GROUP_NAME = 'keystone_authtoken'


"""Access Control Lists (ACLs) control access to the API server."""


def install(app, conf, public_routes):
    """Install ACL check on application.

    :param app: A WSGI application.
    :param conf: Settings. Must include OPT_GROUP_NAME section.
    :param public_routes: The list of the routes which will be allowed
                          access without authentication.
    :return: The same WSGI application with ACL installed.

    """

    keystone_config = dict(conf.items(OPT_GROUP_NAME))
    return auth_token.AuthTokenMiddleware(app,
                                          conf=keystone_config,
                                          public_api_routes=public_routes)
77
sw-patch/cgcs-patch/cgcs_patch/authapi/app.py
Executable file
@ -0,0 +1,77 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

from oslo_config import cfg
import pecan

from cgcs_patch.authapi import acl
from cgcs_patch.authapi import config
from cgcs_patch.authapi import hooks
from cgcs_patch.authapi import policy

from six.moves import configparser

auth_opts = [
    cfg.StrOpt('auth_strategy',
               default='keystone',
               help='Method to use for auth: noauth or keystone.'),
]

CONF = cfg.CONF
CONF.register_opts(auth_opts)


def get_pecan_config():
    # Set up the pecan configuration
    filename = config.__file__.replace('.pyc', '.py')
    return pecan.configuration.conf_from_file(filename)


def setup_app(pecan_config=None, extra_hooks=None):
    config_parser = configparser.RawConfigParser()
    config_parser.read('/etc/patching/patching.conf')

    policy.init()

    app_hooks = [hooks.ConfigHook(),
                 hooks.ContextHook(pecan_config.app.acl_public_routes),
                 ]
    if extra_hooks:
        app_hooks.extend(extra_hooks)

    if not pecan_config:
        pecan_config = get_pecan_config()

    if pecan_config.app.enable_acl:
        app_hooks.append(hooks.AdminAuthHook())

    pecan.configuration.set_config(dict(pecan_config), overwrite=True)

    app = pecan.make_app(
        pecan_config.app.root,
        static_root=pecan_config.app.static_root,
        template_path=pecan_config.app.template_path,
        debug=False,
        force_canonical=getattr(pecan_config.app, 'force_canonical', True),
        hooks=app_hooks,
        guess_content_type_from_ext=False,  # Avoid mime-type lookup
    )

    if pecan_config.app.enable_acl:
        return acl.install(app, config_parser, pecan_config.app.acl_public_routes)

    return app


class VersionSelectorApplication(object):
    def __init__(self):
        pc = get_pecan_config()
        pc.app.enable_acl = (CONF.auth_strategy == 'keystone')
        self.v1 = setup_app(pecan_config=pc)

    def __call__(self, environ, start_response):
        return self.v1(environ, start_response)
40
sw-patch/cgcs-patch/cgcs_patch/authapi/auth_token.py
Executable file
@ -0,0 +1,40 @@
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from keystonemiddleware import auth_token
from sysinv.common import utils


class AuthTokenMiddleware(auth_token.AuthProtocol):
    """A wrapper on Keystone auth_token middleware.

    Does not perform verification of authentication tokens
    for public routes in the API.

    """
    def __init__(self, app, conf, public_api_routes=None):
        if public_api_routes is None:
            public_api_routes = []

        self.public_api_routes = set(public_api_routes)

        super(AuthTokenMiddleware, self).__init__(app, conf)

    def __call__(self, env, start_response):
        path = utils.safe_rstrip(env.get('PATH_INFO'), '/')

        if path in self.public_api_routes:
            return self.app(env, start_response)  # pylint: disable=no-member

        return super(AuthTokenMiddleware, self).__call__(env, start_response)  # pylint: disable=too-many-function-args
23
sw-patch/cgcs-patch/cgcs_patch/authapi/config.py
Executable file
@ -0,0 +1,23 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

# Server Specific Configurations
server = {
    'port': '5491',
    'host': '0.0.0.0'
}

# Pecan Application Configurations
app = {
    'root': 'cgcs_patch.api.controllers.root.RootController',
    'modules': ['cgcs_patch.api'],
    'static_root': '%(confdir)s/public',
    'template_path': '%(confdir)s/../templates',
    'debug': False,
    'enable_acl': True,
    'acl_public_routes': [],
}
100
sw-patch/cgcs-patch/cgcs_patch/authapi/hooks.py
Executable file
@ -0,0 +1,100 @@
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: Doug Hellmann <doug.hellmann@dreamhost.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2013-2017 Wind River Systems, Inc.
#


from oslo_config import cfg
from pecan import hooks

from sysinv.common import context
from sysinv.common import utils
from sysinv.openstack.common import policy
from webob import exc


class ConfigHook(hooks.PecanHook):
    """Attach the config object to the request so controllers can get to it."""

    def before(self, state):
        state.request.cfg = cfg.CONF


class ContextHook(hooks.PecanHook):
    """Configures a request context and attaches it to the request.

    The following HTTP request headers are used:

    X-User-Id or X-User:
        Used for context.user_id.

    X-Tenant-Id or X-Tenant:
        Used for context.tenant.

    X-Auth-Token:
        Used for context.auth_token.

    X-Roles:
        Used for setting context.is_admin flag to either True or False.
        The flag is set to True if X-Roles contains either an administrator
        or admin substring. Otherwise it is set to False.

    """
    def __init__(self, public_api_routes):
        self.public_api_routes = public_api_routes
        super(ContextHook, self).__init__()

    def before(self, state):
        user_id = state.request.headers.get('X-User-Id')
        user_id = state.request.headers.get('X-User', user_id)
        tenant = state.request.headers.get('X-Tenant-Id')
        tenant = state.request.headers.get('X-Tenant', tenant)
        domain_id = state.request.headers.get('X-User-Domain-Id')
        domain_name = state.request.headers.get('X-User-Domain-Name')
        auth_token = state.request.headers.get('X-Auth-Token', None)
        creds = {'roles': state.request.headers.get('X-Roles', '').split(',')}

        is_admin = policy.check('admin', state.request.headers, creds)

        path = utils.safe_rstrip(state.request.path, '/')
        is_public_api = path in self.public_api_routes

        state.request.context = context.RequestContext(
            auth_token=auth_token,
            user=user_id,
            tenant=tenant,
            domain_id=domain_id,
            domain_name=domain_name,
            is_admin=is_admin,
            is_public_api=is_public_api)


class AdminAuthHook(hooks.PecanHook):
    """Verify that the user has admin rights.

    Checks whether the request context is an admin context and
    rejects the request otherwise.

    """
    def before(self, state):
        ctx = state.request.context
        is_admin_api = policy.check('admin_api', {}, ctx.to_dict())

        if not is_admin_api and not ctx.is_public_api:
            raise exc.HTTPForbidden()
117
sw-patch/cgcs-patch/cgcs_patch/authapi/policy.py
Executable file
@ -0,0 +1,117 @@
#
# Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Copyright (c) 2014-2017 Wind River Systems, Inc.
#

"""Policy Engine For Patching."""

import os.path

from sysinv.common import exception
from sysinv.common import utils
from sysinv.openstack.common import policy


_POLICY_PATH = None
_POLICY_CACHE = {}


def reset():
    global _POLICY_PATH
    global _POLICY_CACHE
    _POLICY_PATH = None
    _POLICY_CACHE = {}
    policy.reset()


def init():
    global _POLICY_PATH
    global _POLICY_CACHE
    if not _POLICY_PATH:
        _POLICY_PATH = '/etc/patching/policy.json'
        if not os.path.exists(_POLICY_PATH):
            raise exception.ConfigNotFound(message='/etc/patching/policy.json')
    utils.read_cached_file(_POLICY_PATH, _POLICY_CACHE,
                           reload_func=_set_rules)


def _set_rules(data):
    default_rule = "rule:admin_api"
    policy.set_rules(policy.Rules.load_json(data, default_rule))


def enforce(context, action, target, do_raise=True):
    """Verifies that the action is valid on the target in this context.

    :param context: sysinv context
    :param action: string representing the action to be checked
        this should be colon separated for clarity.
        i.e. ``compute:create_instance``,
        ``compute:attach_volume``,
        ``volume:attach_volume``
    :param target: dictionary representing the object of the action
        for object creation this should be a dictionary representing the
        location of the object e.g. ``{'project_id': context.project_id}``
    :param do_raise: if True (the default), raises PolicyNotAuthorized;
        if False, returns False

    :raises sysinv.exception.PolicyNotAuthorized: if verification fails
        and do_raise is True.

    :return: returns a non-False value (not necessarily "True") if
        authorized, and the exact value False if not authorized and
        do_raise is False.
    """
    init()

    credentials = context.to_dict()

    # Add the exception arguments if asked to do a raise
    extra = {}
    if do_raise:
        extra.update(exc=exception.PolicyNotAuthorized, action=action)

    return policy.check(action, target, credentials, **extra)


def check_is_admin(context):
    """Whether or not role contains 'admin' role according to policy setting.

    """
    init()

    credentials = context.to_dict()
    target = credentials

    return policy.check('context_is_admin', target, credentials)


@policy.register('context_is_admin')
class IsAdminCheck(policy.Check):
    """An explicit check for is_admin."""

    def __init__(self, kind, match):
        """Initialize the check."""

        self.expected = (match.lower() == 'true')

        super(IsAdminCheck, self).__init__(kind, str(self.expected))

    def __call__(self, target, creds):
        """Determine whether is_admin matches the requested value."""

        return creds['is_admin'] == self.expected
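
To make the rule names used above concrete ('admin', 'admin_api', 'context_is_admin', and the 'rule:admin_api' default), here is a hypothetical /etc/patching/policy.json rendered from Python; the rule bodies are illustrative assumptions, not the file actually shipped by packaging:

    import json

    # Illustrative rules only; the real policy.json is delivered by packaging
    example_policy = {
        "admin": "role:admin or role:administrator",
        "admin_api": "is_admin:True",
        "context_is_admin": "rule:admin_api",
    }
    print(json.dumps(example_policy, indent=4))
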
170
sw-patch/cgcs-patch/cgcs_patch/base.py
Normal file
@ -0,0 +1,170 @@
"""
Copyright (c) 2017-2021 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import socket
import struct
import subprocess
import time

import cgcs_patch.utils as utils
import cgcs_patch.config as cfg
import cgcs_patch.constants as constants
from cgcs_patch.patch_functions import LOG


class PatchService(object):
    def __init__(self):
        self.sock_out = None
        self.sock_in = None
        self.service_type = None
        self.port = None
        self.mcast_addr = None
        self.socket_lock = None

    def update_config(self):
        # Implemented in subclass
        pass

    def socket_lock_acquire(self):
        pass

    def socket_lock_release(self):
        pass

    def setup_socket_ipv4(self):
        mgmt_ip = cfg.get_mgmt_ip()
        if mgmt_ip is None:
            # Don't setup socket unless we have a mgmt ip
            return None

        self.update_config()

        interface_addr = socket.inet_pton(socket.AF_INET, mgmt_ip)

        # Close sockets, if necessary
        for s in [self.sock_out, self.sock_in]:
            if s is not None:
                s.close()

        self.sock_out = socket.socket(socket.AF_INET,
                                      socket.SOCK_DGRAM)
        self.sock_in = socket.socket(socket.AF_INET,
                                     socket.SOCK_DGRAM)

        self.sock_out.setblocking(0)
        self.sock_in.setblocking(0)

        self.sock_out.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock_in.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.sock_in.bind(('', self.port))

        if self.mcast_addr:
            # These options are for outgoing multicast messages
            self.sock_out.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, interface_addr)
            self.sock_out.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 1)
            # Since only the controllers are sending to this address,
            # we want the loopback so the local agent can receive it
            self.sock_out.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)

            # Register the multicast group
            group = socket.inet_pton(socket.AF_INET, self.mcast_addr)
            mreq = struct.pack('=4s4s', group, interface_addr)

            self.sock_in.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)

        return self.sock_in

    def setup_socket_ipv6(self):
        mgmt_ip = cfg.get_mgmt_ip()
        if mgmt_ip is None:
            # Don't setup socket unless we have a mgmt ip
            return None

        self.update_config()

        # Close sockets, if necessary
        for s in [self.sock_out, self.sock_in]:
            if s is not None:
                s.close()

        self.sock_out = socket.socket(socket.AF_INET6,
                                      socket.SOCK_DGRAM)
        self.sock_in = socket.socket(socket.AF_INET6,
                                     socket.SOCK_DGRAM)

        self.sock_out.setblocking(0)
        self.sock_in.setblocking(0)

        self.sock_out.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.sock_in.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.sock_out.bind((mgmt_ip, 0))
        self.sock_in.bind(('', self.port))

        if self.mcast_addr:
            # These options are for outgoing multicast messages
            mgmt_ifindex = utils.if_nametoindex(cfg.get_mgmt_iface())
            self.sock_out.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, mgmt_ifindex)
            self.sock_out.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_HOPS, 1)
            # Since only the controllers are sending to this address,
            # we want the loopback so the local agent can receive it
            self.sock_out.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)

            # Register the multicast group
            if_index_packed = struct.pack('I', mgmt_ifindex)
            group = socket.inet_pton(socket.AF_INET6, self.mcast_addr) + if_index_packed
            self.sock_in.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, group)

        return self.sock_in

    def setup_socket(self):
        self.socket_lock_acquire()

        try:
            sock_in = None
            if utils.get_management_version() == constants.ADDRESS_VERSION_IPV6:
                sock_in = self.setup_socket_ipv6()
            else:
                sock_in = self.setup_socket_ipv4()
            self.socket_lock_release()
            return sock_in
        except Exception:
            LOG.exception("Failed to setup socket")

        # Close sockets, if necessary
        for s in [self.sock_out, self.sock_in]:
            if s is not None:
                s.close()

        self.socket_lock_release()

        return None

    def audit_socket(self):
        if not self.mcast_addr:
            # Multicast address not configured, therefore nothing to do
            return

        # Ensure multicast address is still allocated
        cmd = "ip maddr show %s | awk 'BEGIN {ORS=\"\"}; {if ($2 == \"%s\") print $2}'" % \
              (cfg.get_mgmt_iface(), self.mcast_addr)
        try:
            result = subprocess.check_output(cmd, shell=True)

            if result == self.mcast_addr:
                return
        except subprocess.CalledProcessError as e:
            LOG.error("Command output: %s", e.output)
            return

        # Close the socket and set it up again
        LOG.info("Detected missing multicast addr (%s). Reconfiguring", self.mcast_addr)
        while self.setup_socket() is None:
            LOG.info("Unable to setup sockets. Waiting to retry")
            time.sleep(5)
        LOG.info("Multicast address reconfigured")
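
A self-contained sketch of the IPv4 multicast join that setup_socket_ipv4() performs above, with a hard-coded group and port standing in for the patching configuration (239.1.1.3 and 5488 are the controller defaults in cgcs_patch.config; binding the membership to 0.0.0.0 instead of the management address is a simplification):

    import socket
    import struct

    GROUP = '239.1.1.3'   # controller_mcast_group default
    PORT = 5488           # controller_port default

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    sock.bind(('', PORT))

    # Join the multicast group on the default interface; PatchService
    # passes the management interface address here instead of 0.0.0.0
    mreq = struct.pack('=4s4s',
                       socket.inet_pton(socket.AF_INET, GROUP),
                       socket.inet_pton(socket.AF_INET, '0.0.0.0'))
    sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
    print("joined %s, listening on udp/%d" % (GROUP, PORT))
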
51
sw-patch/cgcs-patch/cgcs_patch/certificates.py
Normal file
@ -0,0 +1,51 @@
"""
Copyright (c) 2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

dev_certificate = b"""-----BEGIN CERTIFICATE-----
MIIDejCCAmKgAwIBAgICEAQwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
SW5jLjAeFw0xNzA4MTgxNDM3MjlaFw0yNzA4MTYxNDM3MjlaMEExCzAJBgNVBAYT
AkNBMRAwDgYDVQQIDAdPbnRhcmlvMSAwHgYDVQQKDBdXaW5kIFJpdmVyIFN5c3Rl
bXMsIEluYzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALcs0/Te6x69
lxQOxudrF+uSC5F9r5bKUnZNWUKHyXKlN4SzZgWGs+fb/DqXIm7piuoQ6GH7GEQd
BEN1j/bwp30LZlv0Ur+8jhCvEdqsIP3vUXfv7pv0bomVs0Q8ZRI/FYZhjxYlyFKr
gZFV9WPP8S9SwfClHjaYRUudvwvjHHnnnkZ9blVFbXU0Xe83A8fWd0HNqAU1TlmK
4CeSi4FI4aRKiXJnOvgv2UoJMI57rBIVKYRUH8uuFpPofOwjOM/Rd6r3Ir+4/CX6
+/NALOBIEN6M05ZzoiyiH8NHELknQBqzNs0cXObJWpaSinAOcBnPCc7DNRwgQzjR
SdcE9FG1+LcCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3Bl
blNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFDRbal2KxU0hQyv4
MVnWrW96+aWoMB8GA1UdIwQYMBaAFJaLO1x8+jti7V6pLGbUyqpy0M36MA0GCSqG
SIb3DQEBCwUAA4IBAQBmcPFZzEoPtuMPCFvJ/0cmngp8yvCGxWz3JEDkdGYSCVGs
TG5e9DeltaHOk6yLvZSRY1so30GQnyB9q8v4DwEGVslKg8u9w/WEU81wl6Q2FZ5s
XRP6TASQ0Lbg9e4b3bnTITJJ8jT/zF29NaohgC2fg0UwVuldZLfa7FihJB4//OC1
UdNEcmdqTVRqN2oco1n3ZUWKXvG2AvGsoiqu+lsWX1MXacoFvJexSACLrUvOoXMW
i38Ofp7XMCAm3rM0cXv7Uc9WCrgnTWbEvDgjGfRAmcM9moWGoWX6E46Xkojpkfle
Ss6CHAMK42aZ/+MWQlZEzNK49PtomGMjn5SuoK8u
-----END CERTIFICATE-----"""

formal_certificate = b"""-----BEGIN CERTIFICATE-----
MIIDezCCAmOgAwIBAgICEAMwDQYJKoZIhvcNAQELBQAwQjELMAkGA1UEBhMCQ0Ex
EDAOBgNVBAgMB09udGFyaW8xITAfBgNVBAoMGFdpbmQgUml2ZXIgU3lzdGVtcywg
SW5jLjAeFw0xNzA4MTgxNDM1MTJaFw0yNzA4MTYxNDM1MTJaMEIxCzAJBgNVBAYT
AkNBMRAwDgYDVQQIDAdPbnRhcmlvMSEwHwYDVQQKDBhXaW5kIFJpdmVyIFN5c3Rl
bXMsIEluYy4wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC+0fS8ybg8
M37lW+lcR9LmQAR2zUJdbnl2L0fj3W/7W+PMm3mJWeQDTf19wf+qHHrgEkjxGp10
BSXWZYdPyCdOjAay/Ew1s/waFeAQZpf4vv/9D1Y/4sVkqct9ibo5NVgvVsjqKVnX
IVhyzHlhBSUqYhZlS/SOx8JcLQWSUMJoP2XR4Tv28xIXi0Fuyp8QBwUmSwmvfPy4
0yxzfON/b8kHld5aTY353KLXh/5YWsn1zRlOYfS1OuJk4LGjm6HvmZtxPNUZk4vI
NA24rH4FKkuxyM3x8aPi3LE4G6GSrJDuNi28xzOj864rlFoyLODy/mov1YMR/g4k
d3mG6UbRckPxAgMBAAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9w
ZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBTjyMN/AX07rEmB
6sz6pnyt/m+eSzAfBgNVHSMEGDAWgBSWiztcfPo7Yu1eqSxm1MqqctDN+jANBgkq
hkiG9w0BAQsFAAOCAQEASpyCu/adGTvNjyy/tV+sL/kaVEKLA7q36HUrzQkTjMPX
y8L8PVZoeWprkz7cvYTyHmVTPLBvFkGEFVn8LWi9fTTp/UrHnxw6fvb+V78mOypi
4A1aU9+dh3L6arpd4jZ4hDiLhEClesGCYVTVBdsrh3zSOc51nT4hosyBVpRd/VgQ
jhGJBBMEXASZceady4ajK5jnR3wF8oW/he4NYF97qh8WWKVsIYbwgLS0rT58q7qq
vpjPxMOahUdACkyPyt/XJICTlkanVD7KgG3oLWpc+3FWPHGr+F7mspPLZqUcEFDV
bGF+oDJ7p/tqHsNvPlRDVGqh0QdiAkKeS/SJC9jmAw==
-----END CERTIFICATE-----
"""
138
sw-patch/cgcs-patch/cgcs_patch/config.py
Normal file
@ -0,0 +1,138 @@
|
|||||||
|
"""
|
||||||
|
Copyright (c) 2014-2017 Wind River Systems, Inc.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
import os
|
||||||
|
import six
|
||||||
|
from six.moves import configparser
|
||||||
|
import io
|
||||||
|
import logging
|
||||||
|
import socket
|
||||||
|
import cgcs_patch.utils as utils
|
||||||
|
import cgcs_patch.constants as constants
|
||||||
|
import tsconfig.tsconfig as tsc
|
||||||
|
|
||||||
|
controller_mcast_group = None
|
||||||
|
agent_mcast_group = None
|
||||||
|
controller_port = 0
|
||||||
|
agent_port = 0
|
||||||
|
api_port = 0
|
||||||
|
mgmt_if = None
|
||||||
|
nodetype = None
|
||||||
|
platform_conf_mtime = 0
|
||||||
|
patching_conf_mtime = 0
|
||||||
|
patching_conf = '/etc/patching/patching.conf'
|
||||||
|
|
||||||
|
|
||||||
|
def read_config():
|
||||||
|
global patching_conf_mtime
|
||||||
|
global patching_conf
|
||||||
|
|
||||||
|
if patching_conf_mtime == os.stat(patching_conf).st_mtime:
|
||||||
|
# The file has not changed since it was last read
|
||||||
|
return
|
||||||
|
|
||||||
|
defaults = {
|
||||||
|
'controller_mcast_group': "239.1.1.3",
|
||||||
|
'agent_mcast_group': "239.1.1.4",
|
||||||
|
'api_port': "5487",
|
||||||
|
'controller_port': "5488",
|
||||||
|
'agent_port': "5489",
|
||||||
|
}
|
||||||
|
|
||||||
|
global controller_mcast_group
|
||||||
|
global agent_mcast_group
|
||||||
|
global api_port
|
||||||
|
global controller_port
|
||||||
|
global agent_port
|
||||||
|
|
||||||
|
# In python3 configparser uses strict mode by default. It doesn't
|
||||||
|
# agree duplicate keys, and will throw an error
|
||||||
|
# In python2 the strict argument is missing
|
||||||
|
# TODO(dsafta): the logic branching here can be removed once
|
||||||
|
# https://bugs.launchpad.net/starlingx/+bug/1931529 is fixed, allowing
|
||||||
|
# python3 parser to work in strict mode.
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
config = configparser.SafeConfigParser(defaults)
|
||||||
|
elif six.PY3:
|
||||||
|
config = configparser.SafeConfigParser(defaults, strict=False)
|
||||||
|
|
||||||
|
config.read(patching_conf)
|
||||||
|
patching_conf_mtime = os.stat(patching_conf).st_mtime
|
||||||
|
|
||||||
|
controller_mcast_group = config.get('runtime',
|
||||||
|
'controller_multicast')
|
||||||
|
agent_mcast_group = config.get('runtime', 'agent_multicast')
|
||||||
|
|
||||||
|
api_port = config.getint('runtime', 'api_port')
|
||||||
|
controller_port = config.getint('runtime', 'controller_port')
|
||||||
|
agent_port = config.getint('runtime', 'agent_port')
|
||||||
|
|
||||||
|
# The platform.conf file has no section headers, which causes problems
|
||||||
|
# for ConfigParser. So we'll fake it out.
|
||||||
|
ini_str = u'[platform_conf]\n' + open(tsc.PLATFORM_CONF_FILE, 'r').read()
|
||||||
|
ini_fp = io.StringIO(ini_str)
|
||||||
|
config.readfp(ini_fp)
|
||||||
|
|
||||||
|
try:
|
||||||
|
value = str(config.get('platform_conf', 'nodetype'))
|
||||||
|
|
||||||
|
global nodetype
|
||||||
|
nodetype = value
|
||||||
|
except configparser.Error:
|
||||||
|
logging.exception("Failed to read nodetype from config")
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def get_mgmt_ip():
|
||||||
|
# Check if initial config is complete
|
||||||
|
if not os.path.exists('/etc/platform/.initial_config_complete'):
|
||||||
|
return None
|
||||||
|
mgmt_hostname = socket.gethostname()
|
||||||
|
return utils.gethostbyname(mgmt_hostname)
|
||||||
|
|
||||||
|
|
||||||
|
# Because the patching daemons are launched before manifests are
|
||||||
|
# applied, the content of some settings in platform.conf can change,
|
||||||
|
# such as the management interface. As such, we can't just directly
|
||||||
|
# use tsc.management_interface
|
||||||
|
#
|
||||||
|
def get_mgmt_iface():
|
||||||
|
# Check if initial config is complete
|
||||||
|
if not os.path.exists(constants.INITIAL_CONFIG_COMPLETE_FLAG):
|
||||||
|
return None
|
||||||
|
|
||||||
|
global mgmt_if
|
||||||
|
global platform_conf_mtime
|
||||||
|
|
||||||
|
if mgmt_if is not None and \
|
||||||
|
platform_conf_mtime == os.stat(tsc.PLATFORM_CONF_FILE).st_mtime:
|
||||||
|
# The platform.conf file hasn't been modified since we read it,
|
||||||
|
# so return the cached value.
|
||||||
|
return mgmt_if
|
||||||
|
|
||||||
|
if six.PY2:
|
||||||
|
config = configparser.SafeConfigParser()
|
||||||
|
elif six.PY3:
|
||||||
|
config = configparser.SafeConfigParser(strict=False)
|
||||||
|
|
||||||
|
# The platform.conf file has no section headers, which causes problems
|
||||||
|
# for ConfigParser. So we'll fake it out.
|
||||||
|
ini_str = u'[platform_conf]\n' + open(tsc.PLATFORM_CONF_FILE, 'r').read()
|
||||||
|
ini_fp = io.StringIO(ini_str)
|
||||||
|
config.readfp(ini_fp)
|
||||||
|
|
||||||
|
try:
|
||||||
|
value = str(config.get('platform_conf', 'management_interface'))
|
||||||
|
|
||||||
|
mgmt_if = value
|
||||||
|
|
||||||
|
platform_conf_mtime = os.stat(tsc.PLATFORM_CONF_FILE).st_mtime
|
||||||
|
except configparser.Error:
|
||||||
|
logging.exception("Failed to read management_interface from config")
|
||||||
|
return None
|
||||||
|
return mgmt_if
|
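
[Editor's note] The "[platform_conf]" fake-out above is a general technique for feeding a sectionless key=value file (like platform.conf) to configparser, which refuses input without a section header. A minimal standalone sketch, assuming Python 3 and an illustrative file path; the helper name is hypothetical, not part of the package:

import configparser
import io

def read_sectionless_conf(path, section='platform_conf'):
    # configparser requires at least one [section] header, so prepend a
    # synthetic one before parsing the plain key=value file.
    with open(path, 'r') as f:
        ini_str = '[%s]\n' % section + f.read()
    config = configparser.ConfigParser()
    config.read_file(io.StringIO(ini_str))
    return dict(config.items(section))

# Example (hypothetical path): read_sectionless_conf('/etc/platform/platform.conf')['nodetype']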
51  sw-patch/cgcs-patch/cgcs_patch/constants.py  Normal file
@@ -0,0 +1,51 @@
"""
Copyright (c) 2015-2021 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import os
try:
    # The tsconfig module is only available at runtime
    import tsconfig.tsconfig as tsc

    INITIAL_CONFIG_COMPLETE_FLAG = os.path.join(
        tsc.PLATFORM_CONF_PATH, ".initial_config_complete")
except Exception:
    pass

PATCH_AGENT_STATE_IDLE = "idle"
PATCH_AGENT_STATE_INSTALLING = "installing"
PATCH_AGENT_STATE_INSTALL_FAILED = "install-failed"
PATCH_AGENT_STATE_INSTALL_REJECTED = "install-rejected"

PATCH_STORAGE_DIR = "/opt/patching"

ADDRESS_VERSION_IPV4 = 4
ADDRESS_VERSION_IPV6 = 6
CONTROLLER_FLOATING_HOSTNAME = "controller"

AVAILABLE = 'Available'
APPLIED = 'Applied'
PARTIAL_APPLY = 'Partial-Apply'
PARTIAL_REMOVE = 'Partial-Remove'
COMMITTED = 'Committed'
UNKNOWN = 'n/a'

STATUS_OBSOLETE = 'OBS'
STATUS_RELEASED = 'REL'
STATUS_DEVELOPEMENT = 'DEV'

CLI_OPT_ALL = '--all'
CLI_OPT_DRY_RUN = '--dry-run'
CLI_OPT_RECURSIVE = '--recursive'
CLI_OPT_RELEASE = '--release'

ENABLE_DEV_CERTIFICATE_PATCH_IDENTIFIER = 'ENABLE_DEV_CERTIFICATE'

LOOPBACK_INTERFACE_NAME = "lo"

SEMANTIC_PREAPPLY = 'pre-apply'
SEMANTIC_PREREMOVE = 'pre-remove'
SEMANTIC_ACTIONS = [SEMANTIC_PREAPPLY, SEMANTIC_PREREMOVE]
57  sw-patch/cgcs-patch/cgcs_patch/exceptions.py  Normal file
@@ -0,0 +1,57 @@
"""
Copyright (c) 2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""


class PatchError(Exception):
    """Base class for patching exceptions."""

    def __init__(self, message=None):
        super(PatchError, self).__init__(message)
        self.message = message

    def __str__(self):
        return self.message or ""


class MetadataFail(PatchError):
    """Metadata error."""
    pass


class RpmFail(PatchError):
    """RPM error."""
    pass


class SemanticFail(PatchError):
    """Semantic check error."""
    pass


class RepoFail(PatchError):
    """Repo error."""
    pass


class PatchFail(PatchError):
    """General patching error."""
    pass


class PatchValidationFailure(PatchError):
    """Patch validation error."""
    pass


class PatchMismatchFailure(PatchError):
    """Patch validation error."""
    pass


class PatchInvalidRequest(PatchError):
    """Invalid API request."""
    pass
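
[Editor's note] Because every failure type derives from PatchError, callers can catch the base class while still raising the most specific subclass at the failure site. A short hypothetical usage sketch; load_metadata and the file path are illustrative only:

from cgcs_patch.exceptions import MetadataFail, PatchError

def load_metadata(path):
    # Hypothetical helper: raise the specific subclass where the failure occurs.
    raise MetadataFail("Unable to parse %s" % path)

try:
    load_metadata("/opt/patching/metadata/example.xml")
except PatchError as e:
    # str(e) returns e.message, or "" when no message was given
    print("Patching failed: %s" % e)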
64  sw-patch/cgcs-patch/cgcs_patch/messages.py  Normal file
@@ -0,0 +1,64 @@
"""
Copyright (c) 2014-2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

from cgcs_patch.patch_functions import LOG

PATCHMSG_UNKNOWN = 0
PATCHMSG_HELLO = 1
PATCHMSG_HELLO_ACK = 2
PATCHMSG_SYNC_REQ = 3
PATCHMSG_SYNC_COMPLETE = 4
PATCHMSG_HELLO_AGENT = 5
PATCHMSG_HELLO_AGENT_ACK = 6
PATCHMSG_QUERY_DETAILED = 7
PATCHMSG_QUERY_DETAILED_RESP = 8
PATCHMSG_AGENT_INSTALL_REQ = 9
PATCHMSG_AGENT_INSTALL_RESP = 10
PATCHMSG_DROP_HOST_REQ = 11

PATCHMSG_STR = {
    PATCHMSG_UNKNOWN: "unknown",
    PATCHMSG_HELLO: "hello",
    PATCHMSG_HELLO_ACK: "hello-ack",
    PATCHMSG_SYNC_REQ: "sync-req",
    PATCHMSG_SYNC_COMPLETE: "sync-complete",
    PATCHMSG_HELLO_AGENT: "hello-agent",
    PATCHMSG_HELLO_AGENT_ACK: "hello-agent-ack",
    PATCHMSG_QUERY_DETAILED: "query-detailed",
    PATCHMSG_QUERY_DETAILED_RESP: "query-detailed-resp",
    PATCHMSG_AGENT_INSTALL_REQ: "agent-install-req",
    PATCHMSG_AGENT_INSTALL_RESP: "agent-install-resp",
    PATCHMSG_DROP_HOST_REQ: "drop-host-req",
}


class PatchMessage(object):
    def __init__(self, msgtype=PATCHMSG_UNKNOWN):
        self.msgtype = msgtype
        self.msgversion = 1
        self.message = {}

    def decode(self, data):
        if 'msgtype' in data:
            self.msgtype = data['msgtype']
        if 'msgversion' in data:
            self.msgversion = data['msgversion']

    def encode(self):
        self.message['msgtype'] = self.msgtype
        self.message['msgversion'] = self.msgversion

    def data(self):
        return {'msgtype': self.msgtype}

    def msgtype_str(self):
        if self.msgtype in PATCHMSG_STR:
            return PATCHMSG_STR[self.msgtype]
        return "invalid-type"

    def handle(self, sock, addr):  # pylint: disable=unused-argument
        LOG.info("Unhandled message type: %s", self.msgtype)
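
[Editor's note] The encode()/decode() pair above is just a dict-to-JSON mapping, so a wire roundtrip is straightforward. A minimal sketch, assuming the cgcs_patch package is importable (the sockets themselves are elided here):

import json
from cgcs_patch import messages

# Sender side: populate the dict and serialize it for the wire
out_msg = messages.PatchMessage(messages.PATCHMSG_HELLO)
out_msg.encode()
wire_bytes = json.dumps(out_msg.message).encode()

# Receiver side: parse the JSON, then rebuild a message object from the dict
msgdata = json.loads(wire_bytes.decode())
in_msg = messages.PatchMessage()
in_msg.decode(msgdata)
assert in_msg.msgtype_str() == "hello"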
941  sw-patch/cgcs-patch/cgcs_patch/patch_agent.py  Normal file
@@ -0,0 +1,941 @@
"""
Copyright (c) 2014-2019 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import dnf
import dnf.callback
import dnf.comps
import dnf.exceptions
import dnf.rpm
import dnf.sack
import dnf.transaction
import json
import libdnf.transaction
import os
import random
import requests
import select
import shutil
import socket
import subprocess
import sys
import time

from cgcs_patch.patch_functions import configure_logging
from cgcs_patch.patch_functions import LOG
import cgcs_patch.config as cfg
from cgcs_patch.base import PatchService
import cgcs_patch.utils as utils
import cgcs_patch.messages as messages
import cgcs_patch.constants as constants

from tsconfig.tsconfig import http_port
from tsconfig.tsconfig import install_uuid
from tsconfig.tsconfig import subfunctions
from tsconfig.tsconfig import SW_VERSION

pidfile_path = "/var/run/patch_agent.pid"
node_is_patched_file = "/var/run/node_is_patched"
node_is_patched_rr_file = "/var/run/node_is_patched_rr"
patch_installing_file = "/var/run/patch_installing"
patch_failed_file = "/var/run/patch_install_failed"
node_is_locked_file = "/var/run/.node_locked"

insvc_patch_scripts = "/run/patching/patch-scripts"
insvc_patch_flags = "/run/patching/patch-flags"
insvc_patch_restart_agent = "/run/patching/.restart.patch-agent"

run_insvc_patch_scripts_cmd = "/usr/sbin/run-patch-scripts"

pa = None

http_port_real = http_port

# DNF commands
dnf_cmd = ['/bin/dnf']
dnf_quiet = dnf_cmd + ['--quiet']
dnf_makecache = dnf_quiet + ['makecache',
                             '--disablerepo="*"',
                             '--enablerepo', 'platform-base',
                             '--enablerepo', 'platform-updates']


def setflag(fname):
    try:
        with open(fname, "w") as f:
            f.write("%d\n" % os.getpid())
    except Exception:
        LOG.exception("Failed to update %s flag", fname)


def clearflag(fname):
    if os.path.exists(fname):
        try:
            os.remove(fname)
        except Exception:
            LOG.exception("Failed to clear %s flag", fname)


def check_install_uuid():
    controller_install_uuid_url = "http://controller:%s/feed/rel-%s/install_uuid" % (http_port_real, SW_VERSION)
    try:
        req = requests.get(controller_install_uuid_url)
        if req.status_code != 200:
            # If we're on controller-1, controller-0 may not have the install_uuid
            # matching this release, if we're in an upgrade. If the file doesn't exist,
            # bypass this check
            if socket.gethostname() == "controller-1":
                return True

            LOG.error("Failed to get install_uuid from controller")
            return False
    except requests.ConnectionError:
        LOG.error("Failed to connect to controller")
        return False

    controller_install_uuid = str(req.text).rstrip()

    if install_uuid != controller_install_uuid:
        LOG.error("Local install_uuid=%s doesn't match controller=%s", install_uuid, controller_install_uuid)
        return False

    return True


class PatchMessageHelloAgent(messages.PatchMessage):
    def __init__(self):
        messages.PatchMessage.__init__(self, messages.PATCHMSG_HELLO_AGENT)
        self.patch_op_counter = 0

    def decode(self, data):
        messages.PatchMessage.decode(self, data)
        if 'patch_op_counter' in data:
            self.patch_op_counter = data['patch_op_counter']

    def encode(self):
        messages.PatchMessage.encode(self)

    def handle(self, sock, addr):
        # Send response

        #
        # If a user tries to do a host-install on an unlocked node,
        # without bypassing the lock check (either via in-service
        # patch or --force option), the agent will set its state
        # to Install-Rejected in order to report back the rejection.
        # However, since this should just be a transient state,
        # we don't want the client reporting the Install-Rejected
        # state indefinitely, so reset it to Idle after a minute or so.
        #
        if pa.state == constants.PATCH_AGENT_STATE_INSTALL_REJECTED:
            if os.path.exists(node_is_locked_file):
                # Node has been locked since rejected attempt. Reset the state
                pa.state = constants.PATCH_AGENT_STATE_IDLE
            elif (time.time() - pa.rejection_timestamp) > 60:
                # Rejected state for more than a minute. Reset it.
                pa.state = constants.PATCH_AGENT_STATE_IDLE

        if self.patch_op_counter > 0:
            pa.handle_patch_op_counter(self.patch_op_counter)

        resp = PatchMessageHelloAgentAck()
        resp.send(sock)

    def send(self, sock):  # pylint: disable=unused-argument
        LOG.error("Should not get here")


class PatchMessageHelloAgentAck(messages.PatchMessage):
    def __init__(self):
        messages.PatchMessage.__init__(self, messages.PATCHMSG_HELLO_AGENT_ACK)

    def encode(self):
        global pa
        messages.PatchMessage.encode(self)
        self.message['query_id'] = pa.query_id
        self.message['out_of_date'] = pa.changes
        self.message['hostname'] = socket.gethostname()
        self.message['requires_reboot'] = pa.node_is_patched
        self.message['patch_failed'] = pa.patch_failed
        self.message['sw_version'] = SW_VERSION
        self.message['state'] = pa.state

    def handle(self, sock, addr):
        LOG.error("Should not get here")

    def send(self, sock):
        global pa
        self.encode()
        message = json.dumps(self.message)
        sock.sendto(str.encode(message), (pa.controller_address, cfg.controller_port))


class PatchMessageQueryDetailed(messages.PatchMessage):
    def __init__(self):
        messages.PatchMessage.__init__(self, messages.PATCHMSG_QUERY_DETAILED)

    def decode(self, data):
        messages.PatchMessage.decode(self, data)

    def encode(self):
        # Nothing to add to the HELLO_AGENT, so just call the super class
        messages.PatchMessage.encode(self)

    def handle(self, sock, addr):
        # Send response
        LOG.info("Handling detailed query")
        resp = PatchMessageQueryDetailedResp()
        resp.send(sock)

    def send(self, sock):  # pylint: disable=unused-argument
        LOG.error("Should not get here")


class PatchMessageQueryDetailedResp(messages.PatchMessage):
    def __init__(self):
        messages.PatchMessage.__init__(self, messages.PATCHMSG_QUERY_DETAILED_RESP)

    def encode(self):
        global pa
        messages.PatchMessage.encode(self)
        self.message['installed'] = pa.installed
        self.message['to_remove'] = pa.to_remove
        self.message['missing_pkgs'] = pa.missing_pkgs
        self.message['duplicated_pkgs'] = pa.duplicated_pkgs
        self.message['nodetype'] = cfg.nodetype
        self.message['sw_version'] = SW_VERSION
        self.message['subfunctions'] = subfunctions
        self.message['state'] = pa.state

    def handle(self, sock, addr):
        LOG.error("Should not get here")

    def send(self, sock):
        self.encode()
        message = json.dumps(self.message)
        sock.sendall(str.encode(message))


class PatchMessageAgentInstallReq(messages.PatchMessage):
    def __init__(self):
        messages.PatchMessage.__init__(self, messages.PATCHMSG_AGENT_INSTALL_REQ)
        self.force = False

    def decode(self, data):
        messages.PatchMessage.decode(self, data)
        if 'force' in data:
            self.force = data['force']

    def encode(self):
        # Nothing to add to the HELLO_AGENT, so just call the super class
        messages.PatchMessage.encode(self)

    def handle(self, sock, addr):
        LOG.info("Handling host install request, force=%s", self.force)
        global pa
        resp = PatchMessageAgentInstallResp()

        if not os.path.exists(node_is_locked_file):
            if self.force:
                LOG.info("Installing on unlocked node, with force option")
            else:
                LOG.info("Rejecting install request on unlocked node")
                pa.state = constants.PATCH_AGENT_STATE_INSTALL_REJECTED
                pa.rejection_timestamp = time.time()
                resp.status = False
                resp.reject_reason = 'Node must be locked.'
                resp.send(sock, addr)
                return

        resp.status = pa.handle_install()
        resp.send(sock, addr)

    def send(self, sock):  # pylint: disable=unused-argument
        LOG.error("Should not get here")


class PatchMessageAgentInstallResp(messages.PatchMessage):
    def __init__(self):
        messages.PatchMessage.__init__(self, messages.PATCHMSG_AGENT_INSTALL_RESP)
        self.status = False
        self.reject_reason = None

    def encode(self):
        global pa
        messages.PatchMessage.encode(self)
        self.message['status'] = self.status
        if self.reject_reason is not None:
            self.message['reject_reason'] = self.reject_reason

    def handle(self, sock, addr):
        LOG.error("Should not get here")

    def send(self, sock, addr):
        address = (addr[0], cfg.controller_port)
        self.encode()
        message = json.dumps(self.message)
        sock.sendto(str.encode(message), address)

        # Send a hello ack to follow it
        resp = PatchMessageHelloAgentAck()
        resp.send(sock)


class PatchAgentDnfTransLogCB(dnf.callback.TransactionProgress):
    def __init__(self):
        dnf.callback.TransactionProgress.__init__(self)

        self.log_prefix = 'dnf trans'

    def progress(self, package, action, ti_done, ti_total, ts_done, ts_total):
        if action in dnf.transaction.ACTIONS:
            action_str = dnf.transaction.ACTIONS[action]
        elif action == dnf.transaction.TRANS_POST:
            action_str = 'Post transaction'
        else:
            action_str = 'unknown(%d)' % action

        if ti_done is not None:
            # To reduce the volume of logs, only log 0% and 100%
            if ti_done == 0 or ti_done == ti_total:
                LOG.info('%s PROGRESS %s: %s %0.1f%% [%s/%s]',
                         self.log_prefix, action_str, package,
                         (ti_done * 100 // ti_total),
                         ts_done, ts_total)
        else:
            LOG.info('%s PROGRESS %s: %s [%s/%s]',
                     self.log_prefix, action_str, package, ts_done, ts_total)

    def filelog(self, package, action):
        if action in dnf.transaction.FILE_ACTIONS:
            msg = '%s: %s' % (dnf.transaction.FILE_ACTIONS[action], package)
        else:
            msg = '%s: %s' % (package, action)
        LOG.info('%s FILELOG %s', self.log_prefix, msg)

    def scriptout(self, msgs):
        if msgs:
            LOG.info("%s SCRIPTOUT :\n%s", self.log_prefix, msgs)

    def error(self, message):
        LOG.error("%s ERROR: %s", self.log_prefix, message)


class PatchAgent(PatchService):
    def __init__(self):
        PatchService.__init__(self)
        self.sock_out = None
        self.sock_in = None
        self.controller_address = None
        self.listener = None
        self.changes = False
        self.installed = {}
        self.installed_dnf = []
        self.to_install = {}
        self.to_install_dnf = []
        self.to_downgrade_dnf = []
        self.to_remove = []
        self.to_remove_dnf = []
        self.missing_pkgs = []
        self.missing_pkgs_dnf = []
        self.duplicated_pkgs = {}
        self.patch_op_counter = 0
        self.node_is_patched = os.path.exists(node_is_patched_file)
        self.node_is_patched_timestamp = 0
        self.query_id = 0
        self.state = constants.PATCH_AGENT_STATE_IDLE
        self.last_config_audit = 0
        self.rejection_timestamp = 0
        self.dnfb = None
        self.last_repo_revision = None

        # Check state flags
        if os.path.exists(patch_installing_file):
            # We restarted while installing. Change to failed
            setflag(patch_failed_file)
            os.remove(patch_installing_file)

        if os.path.exists(patch_failed_file):
            self.state = constants.PATCH_AGENT_STATE_INSTALL_FAILED

        self.patch_failed = os.path.exists(patch_failed_file)

    def update_config(self):
        cfg.read_config()

        if self.port != cfg.agent_port:
            self.port = cfg.agent_port

        # Loopback interface does not support multicast messaging, therefore
        # revert to using unicast messaging when configured against the
        # loopback device
        if cfg.get_mgmt_iface() == constants.LOOPBACK_INTERFACE_NAME:
            self.mcast_addr = None
            self.controller_address = cfg.get_mgmt_ip()
        else:
            self.mcast_addr = cfg.agent_mcast_group
            self.controller_address = cfg.controller_mcast_group

    def setup_tcp_socket(self):
        address_family = utils.get_management_family()
        self.listener = socket.socket(address_family, socket.SOCK_STREAM)
        self.listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.listener.bind(('', self.port))
        self.listener.listen(2)  # Allow two connections, for two controllers

    @staticmethod
    def pkgobj_to_version_str(pkg):
        # Transform pkgobj version to format used by patch-controller
        if pkg.epoch != 0:
            output = "%s:%s-%s@%s" % (pkg.epoch, pkg.version, pkg.release, pkg.arch)
        else:
            output = "%s-%s@%s" % (pkg.version, pkg.release, pkg.arch)

        return output

    @staticmethod
    def pkgobjs_to_list(pkgobjs):
        # Transform pkgobj list to format used by patch-controller
        output = {}
        for pkg in pkgobjs:
            output[pkg.name] = PatchAgent.pkgobj_to_version_str(pkg)

        return output

    def dnf_reset_client(self):
        if self.dnfb is not None:
            self.dnfb.close()
            self.dnfb = None

        self.dnfb = dnf.Base()
        self.dnfb.conf.substitutions['infra'] = 'stock'

        # Reset default installonlypkgs list
        self.dnfb.conf.installonlypkgs = []

        self.dnfb.read_all_repos()

        # Ensure only platform repos are enabled for transaction
        for repo in self.dnfb.repos.all():
            if repo.id == 'platform-base' or repo.id == 'platform-updates':
                repo.enable()
            else:
                repo.disable()

        # Read repo info
        self.dnfb.fill_sack()

    def query(self, check_revision=False):
        """ Check current patch state """
        if not check_install_uuid():
            LOG.info("Failed install_uuid check. Skipping query")
            return False

        if self.dnfb is not None:
            self.dnfb.close()
            self.dnfb = None

        # TODO(dpenney): Use python APIs for makecache
        try:
            subprocess.check_output(dnf_makecache, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            LOG.error("Failed to run dnf makecache")
            LOG.error("Command output: %s", e.output)
            # Set a state to "unknown"?
            return False

        self.dnf_reset_client()
        current_repo_revision = self.dnfb.repos['platform-updates']._repo.getRevision()  # pylint: disable=protected-access

        if check_revision and self.last_repo_revision is not None:
            # We're expecting the revision to be updated.
            # If it's not, we ended up getting a cached repomd query.
            if current_repo_revision == self.last_repo_revision:
                LOG.info("makecache returned same revision as previous (%s). Retry after one second",
                         current_repo_revision)
                time.sleep(1)
                try:
                    subprocess.check_output(dnf_makecache, stderr=subprocess.STDOUT)
                except subprocess.CalledProcessError as e:
                    LOG.error("Failed to run dnf makecache")
                    LOG.error("Command output: %s", e.output)
                    # Set a state to "unknown"?
                    return False

                self.dnf_reset_client()
                current_repo_revision = self.dnfb.repos['platform-updates']._repo.getRevision()  # pylint: disable=protected-access
                if current_repo_revision != self.last_repo_revision:
                    LOG.info("Stale repo revision id corrected with retry. New id: %s",
                             current_repo_revision)

        self.last_repo_revision = current_repo_revision

        # Generate a unique query id
        self.query_id = random.random()

        self.changes = False
        self.installed_dnf = []
        self.installed = {}
        self.to_install_dnf = []
        self.to_downgrade_dnf = []
        self.to_remove = []
        self.to_remove_dnf = []
        self.missing_pkgs = []
        self.missing_pkgs_dnf = []

        # Get the repo data
        pkgs_installed = dnf.sack._rpmdb_sack(self.dnfb).query().installed()  # pylint: disable=protected-access
        avail = self.dnfb.sack.query().available().latest()

        # Check for packages with multiple installed versions
        self.duplicated_pkgs = {}
        for pkg in pkgs_installed:
            pkglist = pkgs_installed.filter(name=pkg.name, arch=pkg.arch)
            if len(pkglist) > 1:
                if pkg.name not in self.duplicated_pkgs:
                    self.duplicated_pkgs[pkg.name] = {}
                if pkg.arch not in self.duplicated_pkgs[pkg.name]:
                    self.duplicated_pkgs[pkg.name][pkg.arch] = list(map(PatchAgent.pkgobj_to_version_str, pkglist))
                    LOG.warn("Duplicate packages installed: %s %s",
                             pkg.name, ", ".join(self.duplicated_pkgs[pkg.name][pkg.arch]))

        # There are three possible actions:
        # 1. If installed pkg is not in a repo, remove it.
        # 2. If installed pkg version does not match newest repo version, update it.
        # 3. If a package in the grouplist is not installed, install it.

        for pkg in pkgs_installed:
            highest = avail.filter(name=pkg.name, arch=pkg.arch)
            if highest:
                highest_pkg = highest[0]

                if pkg.evr_eq(highest_pkg):
                    continue

                if pkg.evr_gt(highest_pkg):
                    self.to_downgrade_dnf.append(highest_pkg)
                else:
                    self.to_install_dnf.append(highest_pkg)
            else:
                self.to_remove_dnf.append(pkg)
                self.to_remove.append(pkg.name)

            self.installed_dnf.append(pkg)
            self.changes = True

        # Look for new packages
        self.dnfb.read_comps()
        grp_id = 'updates-%s' % '-'.join(subfunctions)
        pkggrp = None
        for grp in self.dnfb.comps.groups_iter():
            if grp.id == grp_id:
                pkggrp = grp
                break

        if pkggrp is None:
            LOG.error("Could not find software group: %s", grp_id)

        for pkg in pkggrp.packages_iter():
            try:
                res = pkgs_installed.filter(name=pkg.name)
                if len(res) == 0:
                    found_pkg = avail.filter(name=pkg.name)
                    self.missing_pkgs_dnf.append(found_pkg[0])
                    self.missing_pkgs.append(found_pkg[0].name)
                    self.changes = True
            except dnf.exceptions.PackageNotFoundError:
                self.missing_pkgs_dnf.append(pkg)
                self.missing_pkgs.append(pkg.name)
                self.changes = True

        self.installed = self.pkgobjs_to_list(self.installed_dnf)
        self.to_install = self.pkgobjs_to_list(self.to_install_dnf + self.to_downgrade_dnf)

        LOG.info("Patch state query returns %s", self.changes)
        LOG.info("Installed: %s", self.installed)
        LOG.info("To install: %s", self.to_install)
        LOG.info("To remove: %s", self.to_remove)
        LOG.info("Missing: %s", self.missing_pkgs)
        if len(self.duplicated_pkgs) > 0:
            LOG.info("Duplicated: %s", self.duplicated_pkgs)

        return True

    def resolve_dnf_transaction(self, undo_failure=True):
        LOG.info("Starting to process transaction: undo_failure=%s", undo_failure)
        self.dnfb.resolve()
        self.dnfb.download_packages(self.dnfb.transaction.install_set)

        tid = self.dnfb.do_transaction(display=PatchAgentDnfTransLogCB())

        transaction_rc = True
        for t in self.dnfb.transaction:
            if t.state != libdnf.transaction.TransactionItemState_DONE:
                transaction_rc = False
                break

        self.dnf_reset_client()

        if not transaction_rc:
            if undo_failure:
                LOG.error("Failure occurred... Undoing last transaction (%s)", tid)
                old = self.dnfb.history.old((tid,))[0]
                mobj = dnf.db.history.MergedTransactionWrapper(old)

                self.dnfb._history_undo_operations(mobj, old.tid, True)  # pylint: disable=protected-access

                if not self.resolve_dnf_transaction(undo_failure=False):
                    LOG.error("Failed to undo transaction")

        LOG.info("Transaction complete: undo_failure=%s, success=%s", undo_failure, transaction_rc)
        return transaction_rc

    def handle_install(self, verbose_to_stdout=False, disallow_insvc_patch=False):
        #
        # The disallow_insvc_patch parameter is set when we're installing
        # the patch during init. At that time, we don't want to deal with
        # in-service patch scripts, so instead we'll treat any patch as
        # a reboot-required when this parameter is set. Rather than running
        # any scripts, the RR flag will be set, which will result in the node
        # being rebooted immediately upon completion of the installation.
        #

        LOG.info("Handling install")

        # Check the INSTALL_UUID first. If it doesn't match the active
        # controller, we don't want to install patches.
        if not check_install_uuid():
            LOG.error("Failed install_uuid check. Skipping install")

            self.patch_failed = True
            setflag(patch_failed_file)
            self.state = constants.PATCH_AGENT_STATE_INSTALL_FAILED

            # Send a hello to provide a state update
            if self.sock_out is not None:
                hello_ack = PatchMessageHelloAgentAck()
                hello_ack.send(self.sock_out)

            return False

        self.state = constants.PATCH_AGENT_STATE_INSTALLING
        setflag(patch_installing_file)

        try:
            # Create insvc patch directories
            if os.path.exists(insvc_patch_scripts):
                shutil.rmtree(insvc_patch_scripts, ignore_errors=True)
            if os.path.exists(insvc_patch_flags):
                shutil.rmtree(insvc_patch_flags, ignore_errors=True)
            os.mkdir(insvc_patch_scripts, 0o700)
            os.mkdir(insvc_patch_flags, 0o700)
        except Exception:
            LOG.exception("Failed to create in-service patch directories")

        # Send a hello to provide a state update
        if self.sock_out is not None:
            hello_ack = PatchMessageHelloAgentAck()
            hello_ack.send(self.sock_out)

        # Build up the install set
        if verbose_to_stdout:
            print("Checking for software updates...")
        self.query()

        changed = False
        rc = True

        if len(self.duplicated_pkgs) > 0:
            LOG.error("Duplicate installed packages found. Manual recovery is required.")
            rc = False
        else:
            if len(self.to_install_dnf) > 0 or len(self.to_downgrade_dnf) > 0:
                LOG.info("Adding pkgs to installation set: %s", self.to_install)
                for pkg in self.to_install_dnf:
                    self.dnfb.package_install(pkg)

                for pkg in self.to_downgrade_dnf:
                    self.dnfb.package_downgrade(pkg)

                changed = True

            if len(self.missing_pkgs_dnf) > 0:
                LOG.info("Adding missing pkgs to installation set: %s", self.missing_pkgs)
                for pkg in self.missing_pkgs_dnf:
                    self.dnfb.package_install(pkg)
                changed = True

            if len(self.to_remove_dnf) > 0:
                LOG.info("Adding pkgs to be removed: %s", self.to_remove)
                for pkg in self.to_remove_dnf:
                    self.dnfb.package_remove(pkg)
                changed = True

        if changed:
            # Run the transaction set
            transaction_rc = False
            try:
                transaction_rc = self.resolve_dnf_transaction()
            except dnf.exceptions.DepsolveError:
                LOG.exception("Failures resolving dependencies in transaction")
            except dnf.exceptions.DownloadError:
                LOG.exception("Failures downloading in transaction")
            except dnf.exceptions.Error:
                LOG.exception("Failure resolving transaction")

            if not transaction_rc:
                LOG.error("Failures occurred during transaction")
                rc = False
                if verbose_to_stdout:
                    print("WARNING: Software update failed.")

        else:
            if verbose_to_stdout:
                print("Nothing to install.")
            LOG.info("Nothing to install")

        if changed and rc:
            # Update the node_is_patched flag
            setflag(node_is_patched_file)

            self.node_is_patched = True
            if verbose_to_stdout:
                print("This node has been patched.")

            if os.path.exists(node_is_patched_rr_file):
                LOG.info("Reboot is required. Skipping patch-scripts")
            elif disallow_insvc_patch:
                LOG.info("Disallowing patch-scripts. Treating as reboot-required")
                setflag(node_is_patched_rr_file)
            else:
                LOG.info("Running in-service patch-scripts")

                try:
                    subprocess.check_output(run_insvc_patch_scripts_cmd, stderr=subprocess.STDOUT)

                    # Clear the node_is_patched flag, since we've handled it in-service
                    clearflag(node_is_patched_file)
                    self.node_is_patched = False
                except subprocess.CalledProcessError as e:
                    LOG.exception("In-Service patch scripts failed")
                    LOG.error("Command output: %s", e.output)
                    # Fail the patching operation
                    rc = False

        # Clear the in-service patch dirs
        if os.path.exists(insvc_patch_scripts):
            shutil.rmtree(insvc_patch_scripts, ignore_errors=True)
        if os.path.exists(insvc_patch_flags):
            shutil.rmtree(insvc_patch_flags, ignore_errors=True)

        if rc:
            self.patch_failed = False
            clearflag(patch_failed_file)
            self.state = constants.PATCH_AGENT_STATE_IDLE
        else:
            # Update the patch_failed flag
            self.patch_failed = True
            setflag(patch_failed_file)
            self.state = constants.PATCH_AGENT_STATE_INSTALL_FAILED

        clearflag(patch_installing_file)
        self.query()

        # Send a hello to provide a state update
        if self.sock_out is not None:
            hello_ack = PatchMessageHelloAgentAck()
            hello_ack.send(self.sock_out)

        return rc

    def handle_patch_op_counter(self, counter):
        changed = False
        if os.path.exists(node_is_patched_file):
            # The node has been patched. Run a query if:
            # - node_is_patched didn't exist previously
            # - node_is_patched timestamp changed
            timestamp = os.path.getmtime(node_is_patched_file)
            if not self.node_is_patched:
                self.node_is_patched = True
                self.node_is_patched_timestamp = timestamp
                changed = True
            elif self.node_is_patched_timestamp != timestamp:
                self.node_is_patched_timestamp = timestamp
                changed = True
        elif self.node_is_patched:
            self.node_is_patched = False
            self.node_is_patched_timestamp = 0
            changed = True

        if self.patch_op_counter < counter:
            self.patch_op_counter = counter
            changed = True

        if changed:
            rc = self.query(check_revision=True)
            if not rc:
                # Query failed. Reset the op counter
                self.patch_op_counter = 0

    def run(self):
        self.setup_socket()

        while self.sock_out is None:
            # Check every thirty seconds?
            # Once we've got a conf file, tied into packstack,
            # we'll get restarted when the file is updated,
            # and this should be unnecessary.
            time.sleep(30)
            self.setup_socket()

        self.setup_tcp_socket()

        # Ok, now we've got our socket.
        # Let's let the controllers know we're here
        hello_ack = PatchMessageHelloAgentAck()
        hello_ack.send(self.sock_out)

        first_hello = True

        connections = []

        timeout = time.time() + 30.0
        remaining = 30

        while True:
            inputs = [self.sock_in, self.listener] + connections
            outputs = []

            rlist, wlist, xlist = select.select(inputs, outputs, inputs, remaining)

            remaining = int(timeout - time.time())
            if remaining <= 0 or remaining > 30:
                timeout = time.time() + 30.0
                remaining = 30

            if (len(rlist) == 0 and
                    len(wlist) == 0 and
                    len(xlist) == 0):
                # Timeout hit
                self.audit_socket()
                continue

            for s in rlist:
                if s == self.listener:
                    conn, addr = s.accept()
                    connections.append(conn)
                    continue

                data = ''
                addr = None
                msg = None

                if s == self.sock_in:
                    # Receive from UDP
                    data, addr = s.recvfrom(1024)
                else:
                    # Receive from TCP
                    while True:
                        try:
                            packet = s.recv(1024)
                        except socket.error:
                            LOG.exception("Socket error on recv")
                            data = ''
                            break

                        if packet:
                            data += packet.decode()

                            if data == '':
                                break

                            try:
                                json.loads(data)
                                break
                            except ValueError:
                                # Message is incomplete
                                continue
                        else:
                            # End of TCP message received
                            break

                if data == '':
                    # Connection dropped
                    connections.remove(s)
                    s.close()
                    continue

                msgdata = json.loads(data)

                # For now, discard any messages that are not msgversion==1
                if 'msgversion' in msgdata and msgdata['msgversion'] != 1:
                    continue

                if 'msgtype' in msgdata:
                    if msgdata['msgtype'] == messages.PATCHMSG_HELLO_AGENT:
                        if first_hello:
                            self.query()
                            first_hello = False

                        msg = PatchMessageHelloAgent()
                    elif msgdata['msgtype'] == messages.PATCHMSG_QUERY_DETAILED:
                        msg = PatchMessageQueryDetailed()
                    elif msgdata['msgtype'] == messages.PATCHMSG_AGENT_INSTALL_REQ:
                        msg = PatchMessageAgentInstallReq()

                if msg is None:
                    msg = messages.PatchMessage()

                msg.decode(msgdata)
                if s == self.sock_in:
                    msg.handle(self.sock_out, addr)
                else:
                    msg.handle(s, addr)

            for s in xlist:
                if s in connections:
                    connections.remove(s)
                    s.close()

            # Check for in-service patch restart flag
            if os.path.exists(insvc_patch_restart_agent):
                # Make sure it's safe to restart, i.e. no reqs queued
                rlist, wlist, xlist = select.select(inputs, outputs, inputs, 0)
                if (len(rlist) == 0 and
                        len(wlist) == 0 and
                        len(xlist) == 0):
                    # Restart
                    LOG.info("In-service patch restart flag detected. Exiting.")
                    os.remove(insvc_patch_restart_agent)
                    exit(0)


def main():
    global pa

    configure_logging(dnf_log=True)

    cfg.read_config()

    pa = PatchAgent()
    pa.query()

    if len(sys.argv) <= 1:
        pa.run()
    elif sys.argv[1] == "--install":
        if not check_install_uuid():
            # In certain cases, the lighttpd server could still be running using
            # its default port 80, as opposed to the port configured in platform.conf
            global http_port_real
            LOG.info("Failed install_uuid check via http_port=%s. Trying with default port 80", http_port_real)
            http_port_real = 80

        pa.handle_install(verbose_to_stdout=True, disallow_insvc_patch=True)
    elif sys.argv[1] == "--status":
        rc = 0
        if pa.changes:
            rc = 1
        exit(rc)
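
[Editor's note] The TCP receive loop in run() above relies on each payload being a single JSON document: it accumulates chunks until json.loads() succeeds, which doubles as the end-of-message marker. A standalone sketch of that framing technique; recv_json and the canned chunks are illustrative, not part of the package:

import json

def recv_json(recv, bufsize=1024):
    # Accumulate chunks until the buffer parses as complete JSON.
    # A failed parse means the message is still incomplete; an empty
    # chunk means the peer closed the connection mid-message.
    data = ''
    while True:
        packet = recv(bufsize)
        if not packet:
            return None  # connection dropped
        data += packet.decode()
        try:
            return json.loads(data)
        except ValueError:
            continue  # keep reading

# Example with a canned two-chunk message standing in for socket.recv:
chunks = iter([b'{"msgtype"', b': 5, "msgversion": 1}'])
print(recv_json(lambda n: next(chunks)))  # {'msgtype': 5, 'msgversion': 1}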
1513  sw-patch/cgcs-patch/cgcs_patch/patch_client.py  Normal file
(File diff suppressed because it is too large)
2713  sw-patch/cgcs-patch/cgcs_patch/patch_controller.py  Normal file
(File diff suppressed because it is too large)
1440  sw-patch/cgcs-patch/cgcs_patch/patch_functions.py  Normal file
(File diff suppressed because it is too large)
90  sw-patch/cgcs-patch/cgcs_patch/patch_signing.py  Normal file
@@ -0,0 +1,90 @@
"""
Copyright (c) 2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import os
from Cryptodome.Signature import PKCS1_PSS
from Cryptodome.Hash import SHA256
from Cryptodome.PublicKey import RSA  # pylint: disable=unused-import
from Cryptodome.Util.asn1 import DerSequence  # pylint: disable=unused-import
from binascii import a2b_base64  # pylint: disable=unused-import
from cgcs_patch.patch_verify import read_RSA_key
from cgcs_patch.patch_verify import cert_type_formal_str
from cgcs_patch.patch_verify import cert_type_dev_str

# To save memory, read and hash 1M of files at a time
default_blocksize = 1 * 1024 * 1024

# When we sign patches, look for private keys in the following paths
#
# The (currently hardcoded) path on the signing server will be replaced
# by the capability to specify the filename from the calling function.
private_key_files = {cert_type_formal_str: '/signing/keys/formal-private-key.pem',
                     cert_type_dev_str: os.path.expandvars('$MY_REPO/build-tools/signing/dev-private-key.pem')
                     }


def sign_files(filenames, signature_file, private_key=None, cert_type=None):
    """
    Utility function for signing data in files.
    :param filenames: A list of files containing the data to be signed
    :param signature_file: The name of the file to which the signature will be
                           stored
    :param private_key: If specified, sign with this private key. Otherwise,
                        the files in private_key_files will be searched for
                        and used, if found.
    :param cert_type: If specified, and private_key is not specified, sign
                      with a key of the specified type. e.g. 'dev' or 'formal'
    """

    # Hash the data across all files
    blocksize = default_blocksize
    data_hash = SHA256.new()
    for filename in filenames:
        with open(filename, 'rb') as infile:
            data = infile.read(blocksize)
            while len(data) > 0:
                data_hash.update(data)
                data = infile.read(blocksize)

    # Find a private key to use, if not already provided
    need_resign_with_formal = False
    if private_key is None:
        if cert_type is not None:
            # A specific key is asked for
            assert (cert_type in list(private_key_files)), "cert_type=%s is not a known cert type" % cert_type
            dict_key = cert_type
            filename = private_key_files[dict_key]
            # print 'cert_type given: Checking to see if ' + filename + ' exists\n'
            if not os.path.exists(filename) and dict_key == cert_type_formal_str:
                # The formal key is asked for, but is not locally available,
                # substitute the dev key, and we will try to resign with the formal later.
                dict_key = cert_type_dev_str
                filename = private_key_files[dict_key]
                need_resign_with_formal = True
            if os.path.exists(filename):
                # print 'Getting private key from ' + filename + '\n'
                private_key = read_RSA_key(open(filename, 'rb').read())
        else:
            # Search for available keys
            for dict_key in private_key_files.keys():
                filename = private_key_files[dict_key]
                # print 'Search for available keys: Checking to see if ' + filename + ' exists\n'
                if os.path.exists(filename):
                    # print 'Getting private key from ' + filename + '\n'
                    private_key = read_RSA_key(open(filename, 'rb').read())

    assert (private_key is not None), "Could not find signing key"

    # Encrypt the hash (sign the data) with the key we find
    signer = PKCS1_PSS.new(private_key)
    signature = signer.sign(data_hash)

    # Save it
    with open(signature_file, 'wb') as outfile:
        outfile.write(signature)

    return need_resign_with_formal
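
[Editor's note] Signing and verification are symmetric: sign_files() writes a detached PKCS#1 PSS signature over a hash of the concatenated files, and verify_files() in patch_verify.py below checks it against the embedded certificates. A hypothetical end-to-end sketch; the file names are illustrative and the key paths are whatever private_key_files resolves to on the host:

from cgcs_patch.patch_signing import sign_files
from cgcs_patch.patch_verify import verify_files, cert_type_dev

# Ask for the formal key; if only the dev key is present on this host,
# sign_files() substitutes it and returns True so the caller knows a
# formal re-signing pass is still needed.
need_formal = sign_files(['metadata.xml', 'software.tar'],  # files to hash
                         'signature.v2',                    # detached signature output
                         cert_type='formal')

# Verify the detached signature; restrict verification to the dev
# certificate when the dev key was substituted.
ok = verify_files(['metadata.xml', 'software.tar'], 'signature.v2',
                  cert_type=cert_type_dev if need_formal else None)
print(ok)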
191  sw-patch/cgcs-patch/cgcs_patch/patch_verify.py  Normal file
@@ -0,0 +1,191 @@
"""
Copyright (c) 2017 Wind River Systems, Inc.

SPDX-License-Identifier: Apache-2.0

"""

import os
import logging

from Cryptodome.Signature import PKCS1_v1_5
from Cryptodome.Signature import PKCS1_PSS
from Cryptodome.Hash import SHA256
from Cryptodome.PublicKey import RSA
from Cryptodome.Util.asn1 import DerSequence
from binascii import a2b_base64

from cgcs_patch.certificates import dev_certificate
from cgcs_patch.certificates import formal_certificate

# To save memory, read and hash 1M of files at a time
default_blocksize = 1 * 1024 * 1024

dev_certificate_marker = '/etc/pki/wrs/dev_certificate_enable.bin'
LOG = logging.getLogger('main_logger')

cert_type_dev_str = 'dev'
cert_type_formal_str = 'formal'
cert_type_dev = [cert_type_dev_str]
cert_type_formal = [cert_type_formal_str]
cert_type_all = [cert_type_dev_str, cert_type_formal_str]


def verify_hash(data_hash, signature_bytes, certificate_list):
    """
    Checks that a hash's signature can be validated against an approved
    certificate
    :param data_hash: A hash of the data to be validated
    :param signature_bytes: A pre-generated signature (typically, the hash
                            encrypted with a private key)
    :param certificate_list: A list of approved certificates or public keys
                             which the signature is validated against
    :return: True if the signature was validated against a certificate
    """
    verified = False
    for cert in certificate_list:
        if verified:
            break
        pub_key = read_RSA_key(cert)
        pub_key.exportKey()

        # PSS is the recommended signature scheme, but some tools (like OpenSSL)
        # use the older v1_5 scheme. We try to validate against both.
        #
        # We use PSS for patch validation, but use v1_5 for ISO validation
        # since we want to generate detached sigs that a customer can validate
        # using OpenSSL
        verifier = PKCS1_PSS.new(pub_key)
        try:
            verified = verifier.verify(data_hash, signature_bytes)  # pylint: disable=not-callable
        except ValueError:
            verified = False

        if not verified:
            verifier = PKCS1_v1_5.new(pub_key)
            try:
                verified = verifier.verify(data_hash, signature_bytes)  # pylint: disable=not-callable
            except ValueError:
                verified = False

    return verified


def get_public_certificates_by_type(cert_type=None):
    """
    Builds a list of accepted certificates which can be used to validate
    further things. This list may contain multiple certificates depending on
    the configuration of the system and the value of cert_type.

    :param cert_type: A list of strings, certificate types to include in list
                      'formal' - include formal certificate if available
                      'dev' - include developer certificate if available
    :return: A list of certificates in PEM format
    """

    if cert_type is None:
        cert_type = cert_type_all

    cert_list = []

    if cert_type_formal_str in cert_type:
        cert_list.append(formal_certificate)

    if cert_type_dev_str in cert_type:
        cert_list.append(dev_certificate)

    return cert_list


def get_public_certificates():
    """
    Builds a list of accepted certificates which can be used to validate
    further things. This list may contain multiple certificates depending on
    the configuration of the system (for instance, should we include the
    developer certificate in the list).
    :return: A list of certificates in PEM format
    """
    cert_list = [formal_certificate]

    # We enable the dev certificate based on the presence of a file. This file
    # contains a hash of an arbitrary string ('Titanium patching') which has been
    # encrypted with our formal private key. If the file is present (and valid)
    # then we add the developer key to the approved certificates list
    if os.path.exists(dev_certificate_marker):
        with open(dev_certificate_marker, 'rb') as infile:
            signature = infile.read()
        data_hash = SHA256.new()
        data_hash.update(b'Titanium patching')
        if verify_hash(data_hash, signature, cert_list):
            cert_list.append(dev_certificate)
        else:
            msg = "Invalid data found in " + dev_certificate_marker
            LOG.error(msg)

    return cert_list


def read_RSA_key(key_data):
    """
    Utility function for reading an RSA key half from encoded data
    :param key_data: PEM data containing raw key or X.509 certificate
    :return: An RSA key object
    """
    try:
        # Handle data that is just a raw key
        key = RSA.importKey(key_data)
    except ValueError:
        # The RSA.importKey function cannot read X.509 certificates directly
        # (depending on the version of the Crypto library). Instead, we
        # may need to extract the key from the certificate before building
        # the key object
        #
        # We need to strip the BEGIN and END lines from PEM first
        x509lines = key_data.replace(' ', '').split()
        x509text = ''.join(x509lines[1:-1])
        x509data = DerSequence()
        x509data.decode(a2b_base64(x509text))

        # X.509 contains a few parts. The first part (index 0) is the
        # certificate itself, (TBS or "to be signed" cert) and the 7th field
        # of that cert is subjectPublicKeyInfo, which can be imported.
        # RFC3280
        tbsCert = DerSequence()
        tbsCert.decode(x509data[0])

        # Initialize RSA key from the subjectPublicKeyInfo field
        key = RSA.importKey(tbsCert[6])
    return key


def verify_files(filenames, signature_file, cert_type=None):
    """
    Verify data files against a detached signature.
    :param filenames: A list of files containing the data which was signed
    :param signature_file: The name of the file containing the signature
    :param cert_type: Only use specified certificate type to verify (dev/formal)
    :return: True if the signature was verified, False otherwise
    """

    # Hash the data across all files
    blocksize = default_blocksize
    data_hash = SHA256.new()
    for filename in filenames:
        with open(filename, 'rb') as infile:
            data = infile.read(blocksize)
            while len(data) > 0:
                data_hash.update(data)
                data = infile.read(blocksize)

    # Get the signature
    with open(signature_file, 'rb') as sig_file:
        signature_bytes = sig_file.read()

    # Verify the signature
    if cert_type is None:
        certificate_list = get_public_certificates()
    else:
        certificate_list = get_public_certificates_by_type(cert_type=cert_type)
|
||||||
|
return verify_hash(data_hash, signature_bytes, certificate_list)
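A minimal usage sketch for the detached-signature helpers above (an editorial assumption, not part of the commit: it presumes these functions are exposed as cgcs_patch.patch_functions, as the new unit tests later in this diff suggest, and the file names are hypothetical):

    # Hash the listed files, read the detached signature, and check it
    # against the formal certificate only ('dev' would allow the developer key).
    from cgcs_patch.patch_functions import verify_files

    if not verify_files(['bootimage.iso'], 'bootimage.sig', cert_type=['formal']):
        raise RuntimeError('detached signature validation failed')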
92
sw-patch/cgcs-patch/cgcs_patch/templates/query.html
Normal file
@@ -0,0 +1,92 @@

<br>
% if not pd is UNDEFINED and len(pd) > 0:
<table border="2" style="width:300px">
  <tr>
    <th>Patch ID</th>
    <th>Patch Data</th>
  </tr>

% for patch_id in sorted(pd.keys()):
${patchrow(patch_id)}
% endfor
</table>
% endif

% if not info is UNDEFINED and len(info) > 0:
<p>${info}</p>
% endif

% if not warning is UNDEFINED and len(warning) > 0:
<p>Warning:<br>${warning}</p>
% endif

% if not error is UNDEFINED and len(error) > 0:
<p>Error:<br>${error}</p>
% endif

<br><br>
<a href="/patch/query">Show all</a><br>
<a href="/patch/query?show=applied">Show applied</a><br>
<a href="/patch/query?show=available">Show available</a><br>
<a href="/patch/query_hosts">Query Hosts</a><br>

<br><br>
<form action="/patch/upload" method="POST" enctype="multipart/form-data">
<input type="file" name="file"/>
<button type="submit">Upload Patch</button>
</form>

<%def name="patchrow(patch_id)">
<%
    p = pd[patch_id]
%>
<tr>
<td valign="top"><a href="/patch/show/${patch_id}">${patch_id}</a></td>
<td valign="top">
<table border="1" width=100%>
% if p["repostate"] != "":
    <tr><td valign="top">Repo State:</td><td valign="top">${p["repostate"]}</td></tr>
% endif
% if p["patchstate"] != "":
    <tr><td valign="top">Patch State:</td><td valign="top">${p["patchstate"]}</td></tr>
% endif
% if p["status"] != "":
    <tr><td valign="top">Status:</td><td valign="top">${p["status"]}</td></tr>
% endif
% if p["unremovable"] != "":
    <tr><td valign="top">Unremovable:</td><td valign="top">${p["unremovable"]}</td></tr>
% endif
% if p["reboot_required"] != "":
    <tr><td valign="top">Reboot-Required:</td><td valign="top">${p["reboot_required"]}</td></tr>
% endif
% if p["summary"] != "":
    <tr><td valign="top">Summary:</td><td valign="top">${p["summary"]}</td></tr>
% endif
% if p["description"] != "":
    <tr><td valign="top">Description:</td><td valign="top">${p["description"]}</td></tr>
% endif
% if p["install_instructions"] != "":
    <tr><td valign="top">Install Instructions:</td><td valign="top">${p["install_instructions"]}</td></tr>
% endif
% if p["warnings"] != "":
    <tr><td valign="top">Warnings:</td><td valign="top">${p["warnings"]}</td></tr>
% endif
% if p["repostate"] == "Applied":
    <tr>
        <td valign="top">Actions:</td>
        <td valign="top"><a href="/patch/remove/${patch_id}">Remove</a></td>
    </tr>
% endif
% if p["repostate"] == "Available":
    <tr>
        <td valign="top">Actions:</td>
        <td valign="top"><a href="/patch/apply/${patch_id}">Apply</a><br>
            <a href="/patch/delete/${patch_id}">Delete</a></td>
    </tr>
% endif
</table>
</td>
</tr>
</%def>
95
sw-patch/cgcs-patch/cgcs_patch/templates/query.xml
Normal file
@@ -0,0 +1,95 @@
% if not pd is UNDEFINED:
<pd>
% if len(pd) > 0:
% for patch_id in sorted(pd.keys()):
${patchelem(patch_id)}
% endfor
% endif
</pd>
% endif
% if not info is UNDEFINED or not warning is UNDEFINED or not error is UNDEFINED:
<info>
% if not info is UNDEFINED and len(info) > 0:
${info}
% endif
</info>
<warning>
% if not warning is UNDEFINED and len(warning) > 0:
${warning}
% endif
</warning>
<error>
% if not error is UNDEFINED and len(error) > 0:
${error}
% endif
</error>
% endif
<%def name="patchelem(patch_id)">\
<%p = pd[patch_id] %>\
<patch>
    <patch_id>
        ${patch_id}
    </patch_id>
    <status>
% if p["status"] != "":
        ${p["status"]}
% endif
    </status>
    <sw_version>
% if p["sw_version"] != "":
        ${p["sw_version"]}
% endif
    </sw_version>
    <repostate>
% if p["repostate"] != "":
        ${p["repostate"]}
% endif
    </repostate>
    <patchstate>
% if p["patchstate"] != "":
        ${p["patchstate"]}
% endif
    </patchstate>
    <status>
% if p["status"] != "":
        ${p["status"]}
% endif
    </status>
    <unremovable>
% if p["unremovable"] != "":
        ${p["unremovable"]}
% endif
    </unremovable>
    <reboot_required>
% if p["reboot_required"] != "":
        ${p["reboot_required"]}
% endif
    </reboot_required>
    <summary>
% if p["summary"] != "":
        ${p["summary"]}
% endif
    </summary>
    <description>
% if p["description"] != "":
        ${p["description"]}
% endif
    </description>
    <install_instructions>
% if p["install_instructions"] != "":
        ${p["install_instructions"]}
% endif
    </install_instructions>
    <warnings>
% if p["warnings"] != "":
        ${p["warnings"]}
% endif
    </warnings>
    <requires>
% if "requires" in p and len(p["requires"]) > 0:
% for req in sorted(p["requires"]):
        <patch>${req}</patch>
% endfor
% endif
    </requires>
</patch></%def>
32
sw-patch/cgcs-patch/cgcs_patch/templates/query_agents.html
Normal file
@@ -0,0 +1,32 @@

<br>
<table border="2" style="width:300px">
  <tr>
    <th>Hostname</th>
    <th>IP</th>
    <th>Patch Current?</th>
    <th>Requires Reboot</th>
    <th>Time since last ack</th>
  </tr>

% for agent in data:
${agentrow(agent)}
% endfor
</table>

<br><br>
<a href="/patch/query">Show all</a><br>
<a href="/patch/query?show=applied">Show applied</a><br>
<a href="/patch/query?show=available">Show available</a><br>


<%def name="agentrow(agent)">
<tr>
    <td>${agent["hostname"]}</td>
    <td>${agent["ip"]}</td>
    <td>${agent["patch_current"]}</td>
    <td>${agent["requires_reboot"]}</td>
    <td>${agent["secs_since_ack"]}</td>
</tr>
</%def>
75
sw-patch/cgcs-patch/cgcs_patch/templates/query_hosts.xml
Normal file
@@ -0,0 +1,75 @@
% if not data is UNDEFINED and len(data) > 0:
<data>
% for host in data:
${hostelem(host)}
% endfor
</data>
% endif
<%def name="hostelem(host)">\
<%h = host %>\
<host>
    <hostname>
% if h["hostname"] != "":
        ${h["hostname"]}
% endif
    </hostname>
    <requires_reboot>
% if h["requires_reboot"] != "":
        ${h["requires_reboot"]}
% endif
    </requires_reboot>
    <nodetype>
% if h["nodetype"] != "":
        ${h["nodetype"]}
% endif
    </nodetype>
    <ip>
% if h["ip"] != "":
        ${h["ip"]}
% endif
    </ip>
    <missing_pkgs>
% if "missing_pkgs" in h and len(h["missing_pkgs"]) > 0:
% for pkg in sorted(h["missing_pkgs"]):
        <pkg>${pkg}</pkg>
% endfor
% endif
    </missing_pkgs>
    <installed>
% if "installed" in h and len(h["installed"]) > 0:
% for pkg in sorted(h["installed"]):
        <pkg>
            <name>${pkg}</name>
            <pkgname>${h["installed"][pkg]}</pkgname>
        </pkg>
% endfor
% endif
    </installed>
    <to_remove>
% if "to_remove" in h and len(h["to_remove"]) > 0:
% for pkg in sorted(h["to_remove"]):
        <pkg>${pkg}</pkg>
% endfor
% endif
    </to_remove>
    <secs_since_ack>
% if h["secs_since_ack"] != "":
        ${h["secs_since_ack"]}
% endif
    </secs_since_ack>
    <patch_failed>
% if h["patch_failed"] != "":
        ${h["patch_failed"]}
% endif
    </patch_failed>
    <stale_details>
% if h["stale_details"] != "":
        ${h["stale_details"]}
% endif
    </stale_details>
    <patch_current>
% if h["patch_current"] != "":
        ${h["patch_current"]}
% endif
    </patch_current>
</host></%def>
83
sw-patch/cgcs-patch/cgcs_patch/templates/show.html
Normal file
@@ -0,0 +1,83 @@

<br>
% if not metadata is UNDEFINED and len(metadata) > 0:
% for patch_id in sorted(metadata.keys()):
${showpatch(patch_id)}
% endfor
% endif

% if not info is UNDEFINED and len(info) > 0:
<p>${info}</p>
% endif

% if not warning is UNDEFINED and len(warning) > 0:
<p>Warning:<br>${warning}</p>
% endif

% if not error is UNDEFINED and len(error) > 0:
<p>Error:<br>${error}</p>
% endif

<br><br>
<a href="/patch/query">Show all</a><br>
<a href="/patch/query?show=applied">Show applied</a><br>
<a href="/patch/query?show=available">Show available</a><br>
<a href="/patch/query_hosts">Query Hosts</a><br>

<br><br>
<form action="/patch/upload" method="POST" enctype="multipart/form-data">
<input type="file" name="file"/>
<button type="submit">Upload Patch</button>
</form>

<%def name="showpatch(patch_id)">
<%
    p = metadata[patch_id]
%>
<h2>${patch_id}</h2>
<table border="2">
% if p["repostate"] != "":
    <tr><td valign="top">Repo State:</td><td valign="top">${p["repostate"]}</td></tr>
% endif
% if p["patchstate"] != "":
    <tr><td valign="top">Patch State:</td><td valign="top">${p["patchstate"]}</td></tr>
% endif
% if p["status"] != "":
    <tr><td valign="top">Status:</td><td valign="top">${p["status"]}</td></tr>
% endif
% if p["unremovable"] != "":
    <tr><td valign="top">Unremovable:</td><td valign="top">${p["unremovable"]}</td></tr>
% endif
% if p["reboot_required"] != "":
    <tr><td valign="top">Reboot-Required:</td><td valign="top">${p["reboot_required"]}</td></tr>
% endif
% if p["summary"] != "":
    <tr><td valign="top">Summary:</td><td valign="top">${p["summary"]}</td></tr>
% endif
% if p["description"] != "":
    <tr><td valign="top">Description:</td><td valign="top">${p["description"]}</td></tr>
% endif
% if p["install_instructions"] != "":
    <tr><td valign="top">Install Instructions:</td><td valign="top">${p["install_instructions"]}</td></tr>
% endif
% if p["warnings"] != "":
    <tr><td valign="top">Warnings:</td><td valign="top">${p["warnings"]}</td></tr>
% endif
% if "requires" in p and len(p["requires"]) > 0:
    <tr><td valign="top">Requires:</td><td valign="top">
% for req in sorted(p["requires"]):
        ${req}<br>
% endfor
    </td></tr>
% endif
% if not contents is UNDEFINED and patch_id in contents:
    <tr><td valign="top">Contents:</td><td valign="top">
% for pkg in sorted(contents[patch_id]):
        ${pkg}<br>
% endfor
    </td></tr>
% endif

</table>
</%def>
92
sw-patch/cgcs-patch/cgcs_patch/templates/show.xml
Normal file
@@ -0,0 +1,92 @@
<contents>
% if not contents is UNDEFINED and len(contents) > 0:
% for patch_id in sorted(contents.keys()):
    <patch id=${patch_id}>
% for pkg in sorted(contents[patch_id]):
        <pkg>${pkg}</pkg>
% endfor
    </patch>
% endfor
% endif
</contents>
<error>
% if not error is UNDEFINED and len(error) > 0:
    ${error}
% endif
</error>
<metadata>
% if not metadata is UNDEFINED and len(metadata) > 0:
% for patch_id in sorted(metadata.keys()):
${showpatch(patch_id)}
% endfor
% endif
</metadata>
<%def name="showpatch(patch_id)">\
<% p = metadata[patch_id] %>\
<patch>
    <patch_id>
        ${patch_id}
    </patch_id>
    <status>
% if p["status"] != "":
        ${p["status"]}
% endif
    </status>
    <unremovable>
% if p["unremovable"] != "":
        ${p["unremovable"]}
% endif
    </unremovable>
    <reboot_required>
% if p["reboot_required"] != "":
        ${p["reboot_required"]}
% endif
    </reboot_required>
    <sw_version>
% if p["sw_version"] != "":
        ${p["sw_version"]}
% endif
    </sw_version>
    <repostate>
% if p["repostate"] != "":
        ${p["repostate"]}
% endif
    </repostate>
    <patchstate>
% if p["patchstate"] != "":
        ${p["patchstate"]}
% endif
    </patchstate>
    <status>
% if p["status"] != "":
        ${p["status"]}
% endif
    </status>
    <summary>
% if p["summary"] != "":
        ${p["summary"]}
% endif
    </summary>
    <description>
% if p["description"] != "":
        ${p["description"]}
% endif
    </description>
    <install_instructions>
% if p["install_instructions"] != "":
        ${p["install_instructions"]}
% endif
    </install_instructions>
    <warnings>
% if p["warnings"] != "":
        ${p["warnings"]}
% endif
    </warnings>
    <requires>
% if "requires" in p and len(p["requires"]) > 0:
% for req in sorted(p["requires"]):
        <patch>${req}</patch>
% endfor
% endif
    </requires>
</patch></%def>
0
sw-patch/cgcs-patch/cgcs_patch/tests/__init__.py
Normal file
3422
sw-patch/cgcs-patch/cgcs_patch/tests/md5test.txt
Normal file
File diff suppressed because it is too large
25
sw-patch/cgcs-patch/cgcs_patch/tests/test_basics.py
Normal file
@@ -0,0 +1,25 @@
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (c) 2019 Wind River Systems, Inc.
#

import mock
import os
import sys
import testtools

sys.modules['rpm'] = mock.Mock()

import cgcs_patch.patch_functions  # noqa: E402


class CgcsPatchTestCase(testtools.TestCase):

    def test_cgcs_patch_functions_get_md5(self):
        md5testfile = os.path.join(os.path.dirname(__file__), 'md5test.txt')
        expected_result = 0x7179a07a8a5c50a3fc9f1971f1ec317f

        md5result = cgcs_patch.patch_functions.get_md5(md5testfile)

        self.assertEqual(expected_result, md5result)
30
sw-patch/cgcs-patch/cgcs_patch/tests/test_patch_agent.py
Normal file
@@ -0,0 +1,30 @@
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (c) 2019 Wind River Systems, Inc.
#

import mock
import six  # pylint: disable=unused-import
import sys
import testtools

sys.modules['rpm'] = mock.Mock()
sys.modules['dnf'] = mock.Mock()
sys.modules['dnf.callback'] = mock.Mock()
sys.modules['dnf.comps'] = mock.Mock()
sys.modules['dnf.exceptions'] = mock.Mock()
sys.modules['dnf.rpm'] = mock.Mock()
sys.modules['dnf.sack'] = mock.Mock()
sys.modules['dnf.transaction'] = mock.Mock()
sys.modules['libdnf'] = mock.Mock()
sys.modules['libdnf.transaction'] = mock.Mock()

import cgcs_patch.patch_agent  # noqa: E402


class CgcsPatchAgentTestCase(testtools.TestCase):

    def test_cgcs_patch_agent_instantiate(self):
        # pylint: disable=unused-variable
        pc = cgcs_patch.patch_agent.PatchAgent()  # noqa: F841
22
sw-patch/cgcs-patch/cgcs_patch/tests/test_patch_controller.py
Normal file
@@ -0,0 +1,22 @@
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (c) 2019 Wind River Systems, Inc.
#

import mock
import six  # pylint: disable=unused-import
import sys
import testtools

sys.modules['rpm'] = mock.Mock()

import cgcs_patch.patch_controller  # noqa: E402


class CgcsPatchControllerTestCase(testtools.TestCase):

    @mock.patch('six.moves.builtins.open')
    def test_cgcs_patch_controller_instantiate(self, mock_open):  # pylint: disable=unused-argument
        # pylint: disable=unused-variable
        pc = cgcs_patch.patch_controller.PatchController()  # noqa: F841
146
sw-patch/cgcs-patch/cgcs_patch/tests/test_patch_utils.py
Normal file
@@ -0,0 +1,146 @@
#
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (c) 2019 Wind River Systems, Inc.
#

import mock
import socket
import testtools

import cgcs_patch.constants
import cgcs_patch.patch_functions
import cgcs_patch.utils


class CgcsPatchUtilsTestCase(testtools.TestCase):

    def test_if_nametoindex_loopback(self):
        result = cgcs_patch.utils.if_nametoindex('lo')
        self.assertGreater(result, 0)

    def test_if_nametoindex_failure(self):
        result = cgcs_patch.utils.if_nametoindex('xfakeifx')
        self.assertEqual(result, 0)

    def test_gethostbyname(self):
        result = cgcs_patch.utils.gethostbyname('localhost')
        print("gethostbyname returned %s for localhost" % result)
        self.assertIn(result, ['127.0.0.1', '::1'])

    def test_gethostbyname_failure(self):
        result = cgcs_patch.utils.gethostbyname('xfakehostx')
        print("gethostbyname returned %s for xfakehostx" % result)
        self.assertIsNone(result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_management_version_ipv4(self, mock_gethostbyname):
        mock_gethostbyname.return_value = '192.168.204.2'
        expected_result = cgcs_patch.constants.ADDRESS_VERSION_IPV4

        result = cgcs_patch.utils.get_management_version()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_management_version_ipv6(self, mock_gethostbyname):
        mock_gethostbyname.return_value = 'fe80::2e44:fdff:fe84:5479'
        expected_result = cgcs_patch.constants.ADDRESS_VERSION_IPV6

        result = cgcs_patch.utils.get_management_version()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_management_version_ipv4_default(self, mock_gethostbyname):
        mock_gethostbyname.return_value = None
        expected_result = cgcs_patch.constants.ADDRESS_VERSION_IPV4

        result = cgcs_patch.utils.get_management_version()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_management_family_ipv4(self, mock_gethostbyname):
        mock_gethostbyname.return_value = '192.168.204.2'
        expected_result = socket.AF_INET

        result = cgcs_patch.utils.get_management_family()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_management_family_ipv6(self, mock_gethostbyname):
        mock_gethostbyname.return_value = 'fe80::2e44:fdff:fe84:5479'
        expected_result = socket.AF_INET6

        result = cgcs_patch.utils.get_management_family()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_management_version_ipv4_int(self, mock_gethostbyname):
        mock_gethostbyname.return_value = 0xc0a8cc02
        expected_result = socket.AF_INET

        result = cgcs_patch.utils.get_management_family()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_versioned_address_all_ipv4(self, mock_gethostbyname):
        mock_gethostbyname.return_value = '192.168.204.2'
        expected_result = '0.0.0.0'

        result = cgcs_patch.utils.get_versioned_address_all()
        self.assertEqual(expected_result, result)

    @mock.patch('cgcs_patch.utils.gethostbyname')
    def test_get_versioned_address_all_ipv6(self, mock_gethostbyname):
        mock_gethostbyname.return_value = 'fe80::2e44:fdff:fe84:5479'
        expected_result = '::'

        result = cgcs_patch.utils.get_versioned_address_all()
        self.assertEqual(expected_result, result)

    def test_ip_to_url_ipv4(self):
        ip = '192.168.204.2'
        expected_result = ip

        result = cgcs_patch.utils.ip_to_url(ip)
        self.assertEqual(expected_result, result)

    def test_ip_to_url_ipv6(self):
        ip = 'fe80::2e44:fdff:fe84:5479'
        expected_result = '[%s]' % ip

        result = cgcs_patch.utils.ip_to_url(ip)
        self.assertEqual(expected_result, result)

    def test_ip_to_url_invalid(self):
        ip = 'not-an-ip'
        expected_result = ip

        result = cgcs_patch.utils.ip_to_url(ip)
        self.assertEqual(expected_result, result)

    def test_ip_to_versioned_localhost_ipv4(self):
        ip = '192.168.204.2'
        expected_result = 'localhost'

        result = cgcs_patch.utils.ip_to_versioned_localhost(ip)
        self.assertEqual(expected_result, result)

    def test_ip_to_versioned_localhost_ipv6(self):
        ip = 'fe80::2e44:fdff:fe84:5479'
        expected_result = '::1'

        result = cgcs_patch.utils.ip_to_versioned_localhost(ip)
        self.assertEqual(expected_result, result)

    def test_parse_pkgver(self):
        versions = {
            '0:1.2.3-r4': ('0', '1.2.3', 'r4'),
            '4.3.2-1': (None, '4.3.2', '1'),
            '8.1.4': (None, '8.1.4', None),
            '5:7.5.3': ('5', '7.5.3', None),
            'This is a weird version string': (None, 'This is a weird version string', None),
        }

        for ver, expected in versions.items():
            result = cgcs_patch.patch_functions.parse_pkgver(ver)
            self.assertEqual(result, expected)
83
sw-patch/cgcs-patch/cgcs_patch/utils.py
Normal file
@@ -0,0 +1,83 @@
"""
|
||||||
|
Copyright (c) 2016-2019 Wind River Systems, Inc.
|
||||||
|
|
||||||
|
SPDX-License-Identifier: Apache-2.0
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
from netaddr import IPAddress
|
||||||
|
import cgcs_patch.constants as constants
|
||||||
|
import socket
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Python3
|
||||||
|
from socket import if_nametoindex as if_nametoindex_func
|
||||||
|
except ImportError:
|
||||||
|
# Python2
|
||||||
|
import ctypes
|
||||||
|
import ctypes.util
|
||||||
|
|
||||||
|
libc = ctypes.CDLL(ctypes.util.find_library('c'))
|
||||||
|
if_nametoindex_func = libc.if_nametoindex
|
||||||
|
|
||||||
|
|
||||||
|
def if_nametoindex(name):
|
||||||
|
try:
|
||||||
|
return if_nametoindex_func(name)
|
||||||
|
except Exception:
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def gethostbyname(hostname):
|
||||||
|
""" gethostbyname with IPv6 support """
|
||||||
|
try:
|
||||||
|
return socket.getaddrinfo(hostname, None)[0][4][0]
|
||||||
|
except Exception:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def get_management_version():
|
||||||
|
""" Determine whether management is IPv4 or IPv6 """
|
||||||
|
controller_ip_string = gethostbyname(constants.CONTROLLER_FLOATING_HOSTNAME)
|
||||||
|
if controller_ip_string:
|
||||||
|
controller_ip_address = IPAddress(controller_ip_string)
|
||||||
|
return controller_ip_address.version
|
||||||
|
else:
|
||||||
|
return constants.ADDRESS_VERSION_IPV4
|
||||||
|
|
||||||
|
|
||||||
|
def get_management_family():
|
||||||
|
ip_version = get_management_version()
|
||||||
|
if ip_version == constants.ADDRESS_VERSION_IPV6:
|
||||||
|
return socket.AF_INET6
|
||||||
|
else:
|
||||||
|
return socket.AF_INET
|
||||||
|
|
||||||
|
|
||||||
|
def get_versioned_address_all():
|
||||||
|
ip_version = get_management_version()
|
||||||
|
if ip_version == constants.ADDRESS_VERSION_IPV6:
|
||||||
|
return "::"
|
||||||
|
else:
|
||||||
|
return "0.0.0.0"
|
||||||
|
|
||||||
|
|
||||||
|
def ip_to_url(ip_address_string):
|
||||||
|
""" Add brackets if an IPv6 address """
|
||||||
|
try:
|
||||||
|
ip_address = IPAddress(ip_address_string)
|
||||||
|
if ip_address.version == constants.ADDRESS_VERSION_IPV6:
|
||||||
|
return "[%s]" % ip_address_string
|
||||||
|
else:
|
||||||
|
return ip_address_string
|
||||||
|
except Exception:
|
||||||
|
return ip_address_string
|
||||||
|
|
||||||
|
|
||||||
|
def ip_to_versioned_localhost(ip_address_string):
|
||||||
|
""" Add brackets if an IPv6 address """
|
||||||
|
ip_address = IPAddress(ip_address_string)
|
||||||
|
if ip_address.version == constants.ADDRESS_VERSION_IPV6:
|
||||||
|
return "::1"
|
||||||
|
else:
|
||||||
|
return "localhost"
|
34
sw-patch/cgcs-patch/cgcs_patch_id/README.txt
Normal file
@@ -0,0 +1,34 @@
Intended to run on a single build server. Currently yow-cgts2-lx

# On other build servers
mkdir -p /localdisk/designer/jenkins/bin
cp patch_id_allocator_client.py /localdisk/designer/jenkins/bin


# On the intended server: e.g. yow-cgts2-lx
mkdir -p /localdisk/designer/jenkins/bin
cp *py /localdisk/designer/jenkins/bin/
mkdir -p /localdisk/designer/jenkins/patch_ids
sudo cp patch_id_allocator_server.conf /etc/init
sudo initctl reload-configuration
sudo start script

# Change to a different server
edit patch_id_allocator_client.py
change the line ...
server = 'yow-cgts2-lx.wrs.com'

# TODO:
Need to back up the /localdisk/designer/jenkins/patch_ids directory

# Quick test
Point your browser at this url:
http://yow-cgts2-lx:8888/get_patch_id

expected result is:
CGCS_None_PATCH_0000

on each reload of the page, the number increments:
CGCS_None_PATCH_0001
CGCS_None_PATCH_0002
....
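The quick test can also be driven from Python instead of a browser; a hedged sketch (same host and port as configured above, using urlopen as patch_id_allocator_client.py does):

    # Fetch the next patch ID from the allocator service; each request
    # increments the persistent counter, e.g. CGCS_None_PATCH_0001, _0002, ...
    from urllib.request import urlopen

    print(urlopen('http://yow-cgts2-lx.wrs.com:8888/get_patch_id').read())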
50
sw-patch/cgcs-patch/cgcs_patch_id/patch_id_allocator.py
Executable file
@@ -0,0 +1,50 @@
#!/usr/bin/python
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


import fcntl
import string
import time

directory = "/localdisk/designer/jenkins/patch_ids"


def get_unique_id(filename, digits=4):
    counter = 1
    path = "%s/%s" % (directory, filename)
    try:
        # open for update
        file = open(path, "r+")
        fcntl.lockf(file, fcntl.LOCK_EX, digits)
        counter = int(file.read(digits)) + 1
    except IOError:
        # create it
        try:
            file = open(path, "w")
            fcntl.lockf(file, fcntl.LOCK_EX, digits)
        except IOError:
            print("creation of file '%s' failed" % path)
            return -1

    file.seek(0)  # rewind
    format = "%%0%dd" % digits
    file.write(format % counter)

    # Note: close releases lock
    file.close()

    return counter


def get_patch_id(version, prefix="CGCS", digits=4):
    filename = "%s_%s_patchid" % (prefix, version)
    id = get_unique_id(filename)
    if id < 0:
        return None
    patch_id_format = "%%s_%%s_PATCH_%%0%dd" % digits
    patch_id = patch_id_format % (prefix, version, id)
    return patch_id
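A short usage sketch for the allocator module (an assumption for illustration: the counter directory above exists and is writable, and the '21.12' version string is hypothetical):

    import patch_id_allocator as pida

    # Counters are kept per '<prefix>_<version>_patchid' file under the
    # patch_ids directory, so these two calls increment independent counters.
    print(pida.get_patch_id('21.12'))            # e.g. CGCS_21.12_PATCH_0001
    print(pida.get_patch_id('21.12', 'TEST'))    # e.g. TEST_21.12_PATCH_0001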
66
sw-patch/cgcs-patch/cgcs_patch_id/patch_id_allocator_client.py
Executable file
@@ -0,0 +1,66 @@
#!/usr/bin/env python
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

import getopt
import sys

try:
    # Python 2
    from urllib import urlencode
    from urllib2 import urlopen
except ImportError:
    # Python 3
    from urllib.parse import urlencode
    from urllib.request import urlopen


opts = ['sw_version=', 'prefix=']

server = 'yow-cgts2-lx.wrs.com'
port = 8888


def request_patch_id(sw_version="1.01", prefix="CGCS"):
    raw_parms = {}
    raw_parms['sw_version'] = sw_version
    raw_parms['prefix'] = prefix
    print("raw_parms = %s" % str(raw_parms))

    url = "http://%s:%d/get_patch_id" % (server, port)
    params = urlencode(raw_parms)
    response = urlopen(url, params).read()
    return response


def main():
    optlist, remainder = getopt.getopt(sys.argv[1:], '', opts)

    sw_version = None
    prefix = None
    raw_parms = {}

    print("optlist = %s" % str(optlist))
    print("remainder = %s" % str(remainder))
    for key, val in optlist:
        print("key = %s, val = %s" % (key, val))
        if key == '--sw_version':
            sw_version = val
            print("sw_version = %s" % sw_version)
            raw_parms['sw_version'] = sw_version

        if key == '--prefix':
            prefix = val
            print("prefix = %s" % prefix)
            raw_parms['prefix'] = prefix

    # response = request_patch_id(sw_version=sw_version, prefix=prefix)
    response = request_patch_id(**raw_parms)
    print("response = %s" % str(response))


if __name__ == "__main__":
    main()
16
sw-patch/cgcs-patch/cgcs_patch_id/patch_id_allocator_server.conf
Normal file
@@ -0,0 +1,16 @@
# upstart script for patch_id_allocator_server
#
# Installation
# sudo cp patch_id_allocator_server.conf /etc/init
# sudo initctl reload-configuration
# sudo start script

description "patch_id service"
author "Scott Little <scott.little@windriver.com>"

start on runlevel [234]
stop on runlevel [0156]

chdir /tmp
exec /localdisk/designer/jenkins/bin/patch_id_allocator_server.py
respawn
45
sw-patch/cgcs-patch/cgcs_patch_id/patch_id_allocator_server.py
Executable file
@@ -0,0 +1,45 @@
#!/usr/bin/env python
#
# Copyright (c) 2013-2014 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

import os
import sys
import web
import patch_id_allocator as pida


port = 8888

urls = (
    '/get_patch_id', 'get_patch_id',
)


class get_patch_id(object):
    def GET(self):
        data = web.input(sw_version=None, prefix="CGCS")
        output = pida.get_patch_id(data.sw_version, data.prefix)
        return output

    def POST(self):
        data = web.input(sw_version=None, prefix="CGCS")
        output = pida.get_patch_id(data.sw_version, data.prefix)
        return output


class MyApplication(web.application):
    def run(self, port=8080, *middleware):
        func = self.wsgifunc(*middleware)
        return web.httpserver.runsimple(func, ('0.0.0.0', port))


def main():
    app = MyApplication(urls, globals())
    app.run(port=port)


if __name__ == "__main__":
    main()
430
sw-patch/cgcs-patch/pylint.rc
Normal file
@@ -0,0 +1,430 @@
[MASTER]
|
||||||
|
|
||||||
|
# Specify a configuration file.
|
||||||
|
#rcfile=
|
||||||
|
|
||||||
|
# Python code to execute, usually for sys.path manipulation such as
|
||||||
|
# pygtk.require().
|
||||||
|
#init-hook=
|
||||||
|
|
||||||
|
# Profiled execution.
|
||||||
|
profile=no
|
||||||
|
|
||||||
|
# Add files or directories to the blacklist. They should be base names, not
|
||||||
|
# paths.
|
||||||
|
ignore=CVS
|
||||||
|
|
||||||
|
# Pickle collected data for later comparisons.
|
||||||
|
persistent=yes
|
||||||
|
|
||||||
|
# List of plugins (as comma separated values of python modules names) to load,
|
||||||
|
# usually to register additional checkers.
|
||||||
|
load-plugins=
|
||||||
|
|
||||||
|
# DEPRECATED
|
||||||
|
include-ids=no
|
||||||
|
|
||||||
|
# DEPRECATED
|
||||||
|
symbols=no
|
||||||
|
|
||||||
|
extension-pkg-whitelist=lxml
|
||||||
|
|
||||||
|
|
||||||
|
[MESSAGES CONTROL]
|
||||||
|
|
||||||
|
# Enable the message, report, category or checker with the given id(s). You can
|
||||||
|
# either give multiple identifier separated by comma (,) or put this option
|
||||||
|
# multiple time. See also the "--disable" option for examples.
|
||||||
|
#
|
||||||
|
# Python3 checker:
|
||||||
|
#
|
||||||
|
# E1601: print-statement
|
||||||
|
# E1602: parameter-unpacking
|
||||||
|
# E1603: unpacking-in-except
|
||||||
|
# E1604: old-raise-syntax
|
||||||
|
# E1605: backtick
|
||||||
|
# E1606: long-suffix
|
||||||
|
# E1607: old-ne-operator
|
||||||
|
# E1608: old-octal-literal
|
||||||
|
# E1609: import-star-module-level
|
||||||
|
# E1610: non-ascii-bytes-literal
|
||||||
|
# E1611: invalid-unicode-literal
|
||||||
|
# W1601: apply-builtin
|
||||||
|
# W1602: basestring-builtin
|
||||||
|
# W1603: buffer-builtin
|
||||||
|
# W1604: cmp-builtin
|
||||||
|
# W1605: coerce-builtin
|
||||||
|
# W1606: execfile-builtin
|
||||||
|
# W1607: file-builtin
|
||||||
|
# W1608: long-builtin
|
||||||
|
# W1609: raw_input-builtin
|
||||||
|
# W1610: reduce-builtin
|
||||||
|
# W1611: standarderror-builtin
|
||||||
|
# W1612: unicode-builtin
|
||||||
|
# W1613: xrange-builtin
|
||||||
|
# W1614: coerce-method
|
||||||
|
# W1615: delslice-method
|
||||||
|
# W1616: getslice-method
|
||||||
|
# W1617: setslice-method
|
||||||
|
# W1618: no-absolute-import
|
||||||
|
# W1619: old-division
|
||||||
|
# W1620: dict-iter-method
|
||||||
|
# W1621: dict-view-method
|
||||||
|
# W1622: next-method-called
|
||||||
|
# W1623: metaclass-assignment
|
||||||
|
# W1624: indexing-exception
|
||||||
|
# W1625: raising-string
|
||||||
|
# W1626: reload-builtin
|
||||||
|
# W1627: oct-method
|
||||||
|
# W1628: hex-method
|
||||||
|
# W1629: nonzero-method
|
||||||
|
# W1630: cmp-method
|
||||||
|
# W1632: input-builtin
|
||||||
|
# W1633: round-builtin
|
||||||
|
# W1634: intern-builtin
|
||||||
|
# W1635: unichr-builtin
|
||||||
|
# W1636: map-builtin-not-iterating
|
||||||
|
# W1637: zip-builtin-not-iterating
|
||||||
|
# W1638: range-builtin-not-iterating
|
||||||
|
# W1639: filter-builtin-not-iterating
|
||||||
|
# W1640: using-cmp-argument
|
||||||
|
# W1641: eq-without-hash
|
||||||
|
# W1642: div-method
|
||||||
|
# W1643: idiv-method
|
||||||
|
# W1644: rdiv-method
|
||||||
|
# W1645: exception-message-attribute
|
||||||
|
# W1646: invalid-str-codec
|
||||||
|
# W1647: sys-max-int
|
||||||
|
# W1648: bad-python3-import
|
||||||
|
# W1649: deprecated-string-function
|
||||||
|
# W1650: deprecated-str-translate-call
|
||||||
|
# W1651: deprecated-itertools-function
|
||||||
|
# W1652: deprecated-types-field
|
||||||
|
# W1653: next-method-defined
|
||||||
|
# W1654: dict-items-not-iterating
|
||||||
|
# W1655: dict-keys-not-iterating
|
||||||
|
# W1656: dict-values-not-iterating
|
||||||
|
# W1657: deprecated-operator-function
|
||||||
|
# W1658: deprecated-urllib-function
|
||||||
|
# W1659: xreadlines-attribute
|
||||||
|
# W1660: deprecated-sys-function
|
||||||
|
# W1661: exception-escape
|
||||||
|
# W1662: comprehension-escape
|
||||||
|
enable=E1603,E1609,E1610,E1602,E1606,E1608,E1607,E1605,E1604,E1601,E1611,W1652,
|
||||||
|
W1651,W1649,W1657,W1660,W1658,W1659,W1623,W1622,W1620,W1621,W1645,W1641,
|
||||||
|
W1624,W1648,W1625,W1611,W1662,W1661,W1650,W1640,W1630,W1614,W1615,W1642,
|
||||||
|
W1616,W1628,W1643,W1629,W1627,W1644,W1617,W1601,W1602,W1603,W1604,W1605,
|
||||||
|
W1654,W1655,W1656,W1619,W1606,W1607,W1639,W1618,W1632,W1634,W1608,W1636,
|
||||||
|
W1653,W1646,W1638,W1609,W1610,W1626,W1633,W1647,W1635,W1612,W1613,W1637
|
||||||
|
|
||||||
|
# Disable the message, report, category or checker with the given id(s). You
|
||||||
|
# can either give multiple identifiers separated by comma (,) or put this
|
||||||
|
# option multiple times (only on the command line, not in the configuration
|
||||||
|
# file where it should appear only once).You can also use "--disable=all" to
|
||||||
|
# disable everything first and then reenable specific checks. For example, if
|
||||||
|
# you want to run only the similarities checker, you can use "--disable=all
|
||||||
|
# --enable=similarities". If you want to run only the classes checker, but have
|
||||||
|
# no Warning level messages displayed, use"--disable=all --enable=classes
|
||||||
|
# --disable=W"
|
||||||
|
# H216, unittest mock
|
||||||
|
# W0107 unnecessary-pass
|
||||||
|
# W0511 fixme
|
||||||
|
# W0602 global-variable-not-assigned
|
||||||
|
# W0603 global-statement
|
||||||
|
# W0703 broad-except
|
||||||
|
# W0707 raise-missing-from
|
||||||
|
# W1406 redundant-u-string-prefix
|
||||||
|
# W1505 deprecated-method
|
||||||
|
# W1514 unspecified-encoding
|
||||||
|
# Disable Python3 checkers:
|
||||||
|
# W1618: no-absolute-import
|
||||||
|
disable=C, H216, R,
|
||||||
|
W0107, W0511, W0602, W0603, W0703, W0707, W1406, W1505, W1514, W1618
|
||||||
|
|
||||||
|
[REPORTS]
|
||||||
|
|
||||||
|
# Set the output format. Available formats are text, parseable, colorized, msvs
|
||||||
|
# (visual studio) and html. You can also give a reporter class, eg
|
||||||
|
# mypackage.mymodule.MyReporterClass.
|
||||||
|
output-format=text
|
||||||
|
|
||||||
|
# Put messages in a separate file for each module / package specified on the
|
||||||
|
# command line instead of printing them on stdout. Reports (if any) will be
|
||||||
|
# written in a file name "pylint_global.[txt|html]".
|
||||||
|
files-output=no
|
||||||
|
|
||||||
|
# Tells whether to display a full report or only the messages
|
||||||
|
reports=no
|
||||||
|
|
||||||
|
# Python expression which should return a note less than 10 (10 is the highest
|
||||||
|
# note). You have access to the variables errors warning, statement which
|
||||||
|
# respectively contain the number of errors / warnings messages and the total
|
||||||
|
# number of statements analyzed. This is used by the global evaluation report
|
||||||
|
# (RP0004).
|
||||||
|
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
|
||||||
|
|
||||||
|
# Add a comment according to your evaluation note. This is used by the global
|
||||||
|
# evaluation report (RP0004).
|
||||||
|
comment=no
|
||||||
|
|
||||||
|
# Template used to display messages. This is a python new-style format string
|
||||||
|
# used to format the message information. See doc for all details
|
||||||
|
#msg-template=
|
||||||
|
|
||||||
|
|
||||||
|
[BASIC]
|
||||||
|
|
||||||
|
# Required attributes for module, separated by a comma
|
||||||
|
required-attributes=
|
||||||
|
|
||||||
|
# List of builtins function names that should not be used, separated by a comma
|
||||||
|
bad-functions=map,filter,apply,input,file
|
||||||
|
|
||||||
|
# Good variable names which should always be accepted, separated by a comma
|
||||||
|
good-names=i,j,k,ex,Run,_
|
||||||
|
|
||||||
|
# Bad variable names which should always be refused, separated by a comma
|
||||||
|
bad-names=foo,bar,baz,toto,tutu,tata
|
||||||
|
|
||||||
|
# Colon-delimited sets of names that determine each other's naming style when
|
||||||
|
# the name regexes allow several styles.
|
||||||
|
name-group=
|
||||||
|
|
||||||
|
# Include a hint for the correct naming format with invalid-name
|
||||||
|
include-naming-hint=no
|
||||||
|
|
||||||
|
# Regular expression matching correct function names
|
||||||
|
function-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Naming hint for function names
|
||||||
|
function-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Regular expression matching correct variable names
|
||||||
|
variable-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Naming hint for variable names
|
||||||
|
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Regular expression matching correct constant names
|
||||||
|
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||||
|
|
||||||
|
# Naming hint for constant names
|
||||||
|
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
|
||||||
|
|
||||||
|
# Regular expression matching correct attribute names
|
||||||
|
attr-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Naming hint for attribute names
|
||||||
|
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Regular expression matching correct argument names
|
||||||
|
argument-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Naming hint for argument names
|
||||||
|
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Regular expression matching correct class attribute names
|
||||||
|
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||||
|
|
||||||
|
# Naming hint for class attribute names
|
||||||
|
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
|
||||||
|
|
||||||
|
# Regular expression matching correct inline iteration names
|
||||||
|
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
|
||||||
|
|
||||||
|
# Naming hint for inline iteration names
|
||||||
|
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
|
||||||
|
|
||||||
|
# Regular expression matching correct class names
|
||||||
|
class-rgx=[A-Z_][a-zA-Z0-9]+$
|
||||||
|
|
||||||
|
# Naming hint for class names
|
||||||
|
class-name-hint=[A-Z_][a-zA-Z0-9]+$
|
||||||
|
|
||||||
|
# Regular expression matching correct module names
|
||||||
|
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||||
|
|
||||||
|
# Naming hint for module names
|
||||||
|
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
|
||||||
|
|
||||||
|
# Regular expression matching correct method names
|
||||||
|
method-rgx=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Naming hint for method names
|
||||||
|
method-name-hint=[a-z_][a-z0-9_]{2,30}$
|
||||||
|
|
||||||
|
# Regular expression which should only match function or class names that do
|
||||||
|
# not require a docstring.
|
||||||
|
no-docstring-rgx=__.*__
|
||||||
|
|
||||||
|
# Minimum line length for functions/classes that require docstrings, shorter
|
||||||
|
# ones are exempt.
|
||||||
|
docstring-min-length=-1
|
||||||
|
|
||||||
|
|
||||||
|
[FORMAT]
|
||||||
|
|
||||||
|
# Maximum number of characters on a single line.
|
||||||
|
max-line-length=80
|
||||||
|
|
||||||
|
# Regexp for a line that is allowed to be longer than the limit.
|
||||||
|
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
|
||||||
|
|
||||||
|
# Allow the body of an if to be on the same line as the test if there is no
|
||||||
|
# else.
|
||||||
|
single-line-if-stmt=no
|
||||||
|
|
||||||
|
# List of optional constructs for which whitespace checking is disabled
|
||||||
|
no-space-check=trailing-comma,dict-separator
|
||||||
|
|
||||||
|
# Maximum number of lines in a module
|
||||||
|
max-module-lines=1000
|
||||||
|
|
||||||
|
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
|
||||||
|
# tab).
|
||||||
|
indent-string=' '
|
||||||
|
|
||||||
|
# Number of spaces of indent required inside a hanging or continued line.
|
||||||
|
indent-after-paren=4
|
||||||
|
|
||||||
|
|
||||||
|
[LOGGING]
|
||||||
|
|
||||||
|
# Logging modules to check that the string format arguments are in logging
|
||||||
|
# function parameter format
|
||||||
|
logging-modules=logging
|
||||||
|
|
||||||
|
|
||||||
|
[MISCELLANEOUS]
|
||||||
|
|
||||||
|
# List of note tags to take in consideration, separated by a comma.
|
||||||
|
notes=FIXME,XXX,TODO
|
||||||
|
|
||||||
|
|
||||||
|
[SIMILARITIES]
|
||||||
|
|
||||||
|
# Minimum lines number of a similarity.
|
||||||
|
min-similarity-lines=4
|
||||||
|
|
||||||
|
# Ignore comments when computing similarities.
|
||||||
|
ignore-comments=yes
|
||||||
|
|
||||||
|
# Ignore docstrings when computing similarities.
|
||||||
|
ignore-docstrings=yes
|
||||||
|
|
||||||
|
# Ignore imports when computing similarities.
|
||||||
|
ignore-imports=no
|
||||||
|
|
||||||
|
|
||||||
|
[TYPECHECK]
|
||||||
|
|
||||||
|
# Tells whether missing members accessed in mixin class should be ignored. A
|
||||||
|
# mixin class is detected if its name ends with "mixin" (case insensitive).
|
||||||
|
ignore-mixin-members=yes
|
||||||
|
|
||||||
|
# List of module names for which member attributes should not be checked
|
||||||
|
# (useful for modules/projects where namespaces are manipulated during runtime
|
||||||
|
# and thus existing member attributes cannot be deduced by static analysis
|
||||||
|
ignored-modules=dnf,libdnf
|
||||||
|
|
||||||
|
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set).
ignored-classes=rpm,PKCS1_PSS

# When zope mode is activated, add a predefined set of Zope acquired attributes
# to generated-members.
zope=no

# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E0201 when accessed. Python regular
# expressions are accepted.
generated-members=REQUEST,acl_users,aq_parent


[VARIABLES]

# Tells whether we should check for unused import in __init__ files.
init-import=no

# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_$|dummy

# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=


[CLASSES]

# List of interface methods to ignore, separated by a comma. This is used for
# instance to not check methods defines in Zope's Interface base class.
ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by

# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp

# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls

# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs


[DESIGN]

# Maximum number of arguments for function / method
max-args=5

# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*

# Maximum number of locals for function / method body
max-locals=15

# Maximum number of return / yield for function / method body
max-returns=6

# Maximum number of branch for function / method body
max-branches=12

# Maximum number of statements in function / method body
max-statements=50

# Maximum number of parents for a class (see R0901).
max-parents=7

# Maximum number of attributes for a class (see R0902).
max-attributes=7

# Minimum number of public methods for a class (see R0903).
min-public-methods=2

# Maximum number of public methods for a class (see R0904).
max-public-methods=20


[IMPORTS]

# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec

# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=

# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=

# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=


[EXCEPTIONS]

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
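The `ignored-classes=rpm,PKCS1_PSS` entry above exists because pylint cannot statically infer the members of objects created through those modules. A minimal sketch of the pattern it exempts, assuming the pycryptodomex API pulled in by requirements.txt below (the key size and payload are illustrative, not taken from this commit):

    # Illustrative only: pylint cannot introspect the signer returned by
    # PKCS1_PSS.new(), so without the ignored-classes entry above it would
    # flag the .sign() call with E1101 (no-member).
    from Cryptodome.Hash import SHA256
    from Cryptodome.PublicKey import RSA
    from Cryptodome.Signature import PKCS1_PSS

    key = RSA.generate(2048)            # throwaway signing key
    signer = PKCS1_PSS.new(key)         # object pylint cannot infer
    signature = signer.sign(SHA256.new(b"patch metadata"))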
10
sw-patch/cgcs-patch/requirements.txt
Normal file
@ -0,0 +1,10 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

keystonemiddleware
oslo_config
pecan
pycryptodomex
lxml
requests_toolbelt
23
sw-patch/cgcs-patch/setup.cfg
Normal file
@ -0,0 +1,23 @@
[metadata]
license_files = LICENSE
name = cgcs_patch
classifier =
    Environment :: OpenStack
    Intended Audience :: Information Technology
    Intended Audience :: System Administrators
    License :: OSI Approved :: Apache Software License
    Operating System :: POSIX :: Linux
    Programming Language :: Python
    Programming Language :: Python :: 3
    Programming Language :: Python :: 3.6

[files]
packages =
    cgcs_patch

[wheel]
universal = 1

[options]
install_requires =
    pycryptodomex
23
sw-patch/cgcs-patch/setup.py
Normal file
@ -0,0 +1,23 @@
#!/usr/bin/env python
#
# Copyright (c) 2013-2015 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#


import setuptools

setuptools.setup(
    name='cgcs_patch',
    version='1.0',
    description='CGCS Patch',
    packages=setuptools.find_packages(),
    install_requires=[
        'pycryptodomex',
    ],
    package_data={
        # Include templates
        '': ['templates/*'],
    }
)
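A quick sanity check, not part of the commit: `find_packages()` in setup.py should resolve to the same `cgcs_patch` tree that setup.cfg declares. A hedged sketch, assuming it is run from the sw-patch/cgcs-patch directory:

    # Hypothetical check: list what setuptools discovers for this tree.
    # The exact output depends on which subdirectories carry an __init__.py.
    import setuptools

    print(setuptools.find_packages())
    # expected to include 'cgcs_patch' (and e.g. 'cgcs_patch.tests' if present)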
13
sw-patch/cgcs-patch/test-requirements.txt
Normal file
@ -0,0 +1,13 @@
# The order of packages is significant, because pip processes them in the order
# of appearance. Changing the order has an impact on the overall integration
# process, which may cause wedges in the gate later.

hacking>=1.1.0,<=2.0.0 # Apache-2.0
bandit<1.7.2;python_version>="3.6" # Apache-2.0
coverage!=4.4,>=4.0 # Apache-2.0
flake8-bugbear;python_version>="3.6" # MIT
isort<5;python_version>="3.6"
mock # BSD
pylint;python_version>="3.6" # GPLv2
stestr # Apache-2.0
testtools # MIT
146
sw-patch/cgcs-patch/tox.ini
Normal file
@ -0,0 +1,146 @@
#
# Copyright (c) 2019 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#

[tox]
envlist = flake8,py36,py39,pylint,cover
minversion = 2.3.2
skipsdist = True

stxdir = {toxinidir}/../../..

[testenv]
setenv = VIRTUAL_ENV={envdir}
         LANG=en_US.UTF-8
         LANGUAGE=en_US:en
         LC_ALL=C
         OS_STDERR_CAPTURE=1
         OS_STDOUT_CAPTURE=1
         OS_TEST_PATH=./cgcs_patch/tests
         OS_TEST_TIMEOUT=60
         PYTHONDONTWRITEBYTECODE=1
         PYTHONHASHSEED=0
         PYTHONWARNINGS=default::DeprecationWarning
         PIP_DISABLE_PIP_VERSION_CHECK=1

passenv =
    XDG_CACHE_HOME

sitepackages = False
install_command = pip install \
    -v -v -v \
    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/starlingx/root/raw/branch/master/build-tools/requirements/debian/upper-constraints.txt} \
    {opts} {packages}

deps = -r{toxinidir}/requirements.txt
       -r{toxinidir}/test-requirements.txt
       -e{[tox]stxdir}/fault/fm-api/source
       -e{[tox]stxdir}/config/sysinv/sysinv/sysinv
       -e{[tox]stxdir}/config/tsconfig/tsconfig

whitelist_externals = find
                      sh

[testenv:stestr]
commands =
    find . -name "*.pyc" -delete
    stestr run {posargs}
    stestr slowest

[testenv:py27]
basepython = python2.7
commands = {[testenv:stestr]commands}

[testenv:py36]
basepython = python3.6
commands = {[testenv:stestr]commands}

[testenv:py39]
basepython = python3.9
install_command = pip install \
    -v -v -v \
    -c{env:UPPER_CONSTRAINTS_FILE:https://opendev.org/starlingx/root/raw/branch/master/build-tools/requirements/debian/upper-constraints.txt} \
    {opts} {packages}
commands =
    find . -name "*.pyc" -delete
    stestr run {posargs}
    stestr slowest


[bandit]
# B101: Test for use of assert
# B104: Test for binding to all interfaces
# B110: Try, Except, Pass detected.
# B303: Use of insecure MD2, MD4, MD5, or SHA1 hash function.
# B311: Standard pseudo-random generators are not suitable for security/cryptographic purposes
# B314: Blacklisted calls to xml.etree.ElementTree
# B318: Blacklisted calls to xml.dom.minidom
# B320: Blacklisted calls to lxml.etree.parse
# B404: Import of subprocess module
# B405: import xml.etree
# B408: import xml.minidom
# B410: import etree
# B413: import pyCrypto
# B506: Test for use of yaml load
# B602: Test for use of popen with shell equals true
# B603: Test for use of subprocess without shell equals true
# B607: Test for starting a process with a partial path
skips = B101,B104,B110,B303,B311,B314,B318,B320,B404,B405,B408,B410,B413,B506,B602,B603,B607
exclude = tests

[testenv:bandit]
basepython = python3
deps = -r{toxinidir}/test-requirements.txt
commands = bandit --ini tox.ini -n 5 -r cgcs_patch

[flake8]
# ignore below errors, will fix flake8 errors in future
# H101 Use TODO(NAME)
# H102 Apache 2.0 license header not found
# H105 Don't use author tags
# H306 imports not in alphabetical order
# H401 docstring should not start with a space
# H404 multi line docstring should start without a leading new line
# H405 multi line docstring summary not separated with an empty line
# Note: W503 and W504 are mutually exclusive. Must select one of them to suppress.
# W504 line break after binary operator
# E501 line too long. skipped because some of the code files include templates
# that end up quite wide
# F401 'XXXXX' imported but unused
show-source = True
ignore = H101,H102,H105,H306,H401,H404,H405,
         W504,E501,F401
exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,release-tag-*
# H106: Don't put vim configuration in source files (off by default).
# H203: Use assertIs(Not)None to check for None (off by default).
# (todo) enable H904 Delay string interpolations at logging calls (off by default)
enable-extensions = H106 H203
max-line-length = 80

[testenv:flake8]
basepython = python3
deps = {[testenv]deps}
usedevelop = False
#skip_install = True
commands =
    flake8 {posargs} .

[testenv:pylint]
basepython = python3
sitepackages = True
commands = pylint cgcs_patch --rcfile=./pylint.rc

[testenv:cover]
setenv =
    PYTHON=coverage run --parallel-mode
    PYTHONDONTWRITEBYTECODE=True

commands = coverage erase
    find . -name "*.pyc" -delete
    stestr run {posargs}
    coverage combine
    coverage html -d cover
    coverage xml -o cover/coverage.xml
    coverage report
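Since `[testenv:stestr]` drives the py27/py36/py39 environments and `OS_TEST_PATH` points at `./cgcs_patch/tests`, a minimal test module that stestr would pick up could look like the sketch below; the file name and class are hypothetical, and `testtools` comes from test-requirements.txt above:

    # Hypothetical file: cgcs_patch/tests/test_example.py
    # stestr discovers modules matching test* under OS_TEST_PATH.
    import testtools


    class ExampleTestCase(testtools.TestCase):

        def test_sanity(self):
            # trivial assertion to verify the tox/stestr wiring
            self.assertTrue(True)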
Some files were not shown because too many files have changed in this diff