From a4174a1e0a211e1629b841ecf26596b01c2464b1 Mon Sep 17 00:00:00 2001
From: Scott Little <scott.little@windriver.com>
Date: Thu, 25 Jul 2024 15:40:41 -0400
Subject: [PATCH] Remove CentOS/OpenSUSE build support

StarlingX stopped supporting CentOS builds after release 7.0.
This update will strip CentOS from our code base.  It will also
remove references to the failed OpenSUSE feature.

There is still one CentOS based docker image (n3000), so
this update will leave limited CentOS support in download
and docker image building tools.

Verified with a full Jenkins master branch build for Debian.
- download full and incremental
- package build full and incremental
- iso build
- build of base container image
- build all flock container images
- helm chart build.

Story: 2011110
Task: 49939
Change-Id: I57939d2026d7df76091e8658750b4bd0fa8e4f5f
Signed-off-by: Scott Little <scott.little@windriver.com>
---
 .gitignore                                    |    3 -
 build-data/build_avoidance_source             |   23 -
 build-data/unbuilt_rpm_patterns               |   22 -
 build-tools/Cached_Data.txt                   |   78 -
 build-tools/audit-pkgs                        |   80 -
 build-tools/build-avoidance-utils.sh          |  923 -----
 build-tools/build-docker-images/README        |    9 +-
 .../base-image-build-centos-dev.cfg           |    2 -
 .../base-image-build-centos-stable.cfg        |    2 -
 .../build-docker-images/build-stx-base.sh     |   43 +-
 ...arlingx-enable-disable-package-repos.patch |   44 +-
 .../stx-centos/Dockerfile.dev                 |   16 -
 .../stx-centos/Dockerfile.stable              |   31 -
 .../build-docker-images/update-stx-image.sh   |    2 +-
 build-tools/build-guest                       |  412 --
 build-tools/build-helm-charts.sh              |  100 +-
 build-tools/build-img                         |  638 ---
 build-tools/build-iso                         |  853 ----
 build-tools/build-pkg-srpm                    |   63 -
 build-tools/build-pkgs                        |  126 -
 build-tools/build-pkgs-parallel               |  538 ---
 build-tools/build-pkgs-serial                 |  538 ---
 build-tools/build-rpms                        |   44 -
 build-tools/build-rpms-parallel               | 2507 ------------
 build-tools/build-rpms-serial                 | 2220 ----------
 build-tools/build-srpms                       |   52 -
 build-tools/build-srpms-common.sh             |  106 -
 build-tools/build-srpms-parallel              | 1605 --------
 build-tools/build-srpms-serial                | 1424 -------
 build-tools/build-wheels/FIXME                |    5 +-
 build-tools/build-wheels/build-base-wheels.sh |    4 +-
 .../build-wheels/build-wheel-tarball.sh       |    2 +-
 build-tools/build-wheels/centos/Dockerfile    |   46 -
 .../build-wheels/centos/dev-wheels-py2.cfg    |   20 -
 .../build-wheels/centos/dev-wheels.cfg        |   21 -
 build-tools/build-wheels/centos/openstack.cfg |   13 -
 .../build-wheels/centos/stable-wheels-py2.cfg |  178 -
 .../build-wheels/centos/stable-wheels.cfg     |  183 -
 build-tools/build-wheels/doc/wheels-cfg.md    |    2 +-
 build-tools/build-wheels/get-stx-wheels.sh    |   54 +-
 build-tools/build_guest/build-guest-image.py  |  123 -
 build-tools/build_guest/image-rt.inc          |   14 -
 build-tools/build_guest/image.inc             |   14 -
 build-tools/build_guest/rootfs-exclude.txt    |   13 -
 .../build_guest/rootfs-rt/boot/extlinux.conf  |    7 -
 build-tools/build_guest/rootfs-setup.sh       |   92 -
 .../build_guest/rootfs-std/boot/extlinux.conf |    7 -
 .../cloud/cloud.cfg.d/99_wrs-datasources.cfg  |   18 -
 .../build_guest/rootfs/etc/dhcp/dhclient.conf |   21 -
 .../build_guest/rootfs/etc/iptables.rules     |   12 -
 .../rootfs/etc/modprobe.d/floppy.conf         |    1 -
 .../rootfs/etc/modprobe.d/wrs_avp.conf        |    1 -
 .../rootfs/etc/modules-load.d/wrs_avp.conf    |    1 -
 .../etc/sysconfig/network-scripts/ifcfg-eth0  |    8 -
 .../etc/udev/rules.d/65-renumber-net.rules    |    4 -
 .../rootfs/usr/lib/udev/renumber_device       |   12 -
 .../build_guest/rpm-install-list-rt.txt       |  294 --
 build-tools/build_guest/rpm-install-list.txt  |  291 --
 build-tools/build_guest/rpm-remove-list.txt   |    7 -
 build-tools/build_iso/anaconda-ks.cfg         |   40 -
 build-tools/build_iso/cgts_deps.sh            |  352 --
 build-tools/build_iso/comps.xml.gz            |  Bin 160726 -> 0 bytes
 build-tools/build_iso/gather_packages.pl      |  122 -
 build-tools/build_iso/image-dev.inc           |    6 -
 build-tools/build_iso/image.inc               |   84 -
 build-tools/build_iso/isolinux.cfg            |  125 -
 build-tools/build_iso/ks.cfg                  |   36 -
 build-tools/build_iso/minimal_rpm_list.txt    |  256 --
 build-tools/build_iso/openstack_kilo.txt      |    2 -
 build-tools/build_minimal_iso/README          |  112 -
 build-tools/build_minimal_iso/README.2        |    5 -
 build-tools/build_minimal_iso/build.cfg       |  108 -
 build-tools/build_minimal_iso/build.sh        |   64 -
 build-tools/build_minimal_iso/build_centos.sh |   62 -
 build-tools/build_minimal_iso/cgts_deps.sh    |  265 --
 build-tools/build_minimal_iso/yum.conf        |   22 -
 build-tools/classify                          |   55 -
 build-tools/create-yum-conf                   |   87 -
 build-tools/create_dependancy_cache.py        |  716 ----
 build-tools/default_build_srpm                |  277 --
 build-tools/find_klm                          |   59 -
 .../find_patched_srpms_needing_upgrade        |   54 -
 build-tools/helm_chart_modify.py              |    8 +-
 build-tools/image-utils.sh                    |    4 +-
 build-tools/ip_report.py                      |  523 ---
 build-tools/make-installer-images.sh          |  343 --
 build-tools/mk/_sign_pkgs.mk                  |   31 -
 build-tools/mockchain-parallel                |  114 -
 build-tools/mockchain-parallel-1.3.4          | 1219 ------
 build-tools/mockchain-parallel-1.4.16         | 1226 ------
 build-tools/mockchain-parallel-2.6            | 1221 ------
 build-tools/mockchain-parallel-2.7            | 1221 ------
 build-tools/modify-build-cfg                  |  205 -
 build-tools/patch-iso                         |  427 --
 build-tools/patch_rebase_1                    |  140 -
 build-tools/patch_rebase_2                    |  158 -
 build-tools/patch_rebase_3                    |  129 -
 build-tools/patch_rebase_4                    |  413 --
 build-tools/repo_files/comps.xml              |   39 -
 build-tools/repo_files/mock.cfg.all.proto     |    1 -
 .../repo_files/mock.cfg.centos7.all.proto     |   62 -
 .../repo_files/mock.cfg.centos7.distro.proto  |   62 -
 build-tools/repo_files/mock.cfg.centos7.proto |   61 -
 .../repo_files/mock.cfg.centos8.all.proto     |   63 -
 .../repo_files/mock.cfg.centos8.distro.proto  |   63 -
 build-tools/repo_files/mock.cfg.centos8.proto |   62 -
 build-tools/repo_files/mock.cfg.distro.proto  |    1 -
 build-tools/repo_files/mock.cfg.proto         |    1 -
 build-tools/sign-rpms                         |  293 --
 build-tools/sign-secure-boot                  |  538 ---
 build-tools/source_lookup.txt                 |   85 -
 build-tools/spec-utils                        |  713 ----
 build-tools/srpm-utils                        | 3630 -----------------
 build-tools/stx/discovery.py                  |    2 +-
 build-tools/stxRpmUtils.py                    |   41 -
 build-tools/sync-jenkins                      |  154 -
 build-tools/sync_jenkins.sh                   |  145 -
 build-tools/tis.macros                        |   11 -
 build-tools/update-efiboot-image              |  243 --
 build-tools/update-pxe-network-installer      |  197 -
 build-tools/url_utils.sh                      |    2 +
 build-tools/wheel-utils.sh                    |    4 +-
 122 files changed, 64 insertions(+), 30672 deletions(-)
 delete mode 100644 build-data/build_avoidance_source
 delete mode 100644 build-data/unbuilt_rpm_patterns
 delete mode 100644 build-tools/Cached_Data.txt
 delete mode 100755 build-tools/audit-pkgs
 delete mode 100644 build-tools/build-avoidance-utils.sh
 delete mode 100644 build-tools/build-docker-images/base-image-build-centos-dev.cfg
 delete mode 100644 build-tools/build-docker-images/base-image-build-centos-stable.cfg
 delete mode 100644 build-tools/build-docker-images/stx-centos/Dockerfile.dev
 delete mode 100644 build-tools/build-docker-images/stx-centos/Dockerfile.stable
 delete mode 100755 build-tools/build-guest
 delete mode 100755 build-tools/build-img
 delete mode 100755 build-tools/build-iso
 delete mode 100644 build-tools/build-pkg-srpm
 delete mode 100755 build-tools/build-pkgs
 delete mode 100755 build-tools/build-pkgs-parallel
 delete mode 100755 build-tools/build-pkgs-serial
 delete mode 100755 build-tools/build-rpms
 delete mode 100755 build-tools/build-rpms-parallel
 delete mode 100755 build-tools/build-rpms-serial
 delete mode 100755 build-tools/build-srpms
 delete mode 100644 build-tools/build-srpms-common.sh
 delete mode 100755 build-tools/build-srpms-parallel
 delete mode 100755 build-tools/build-srpms-serial
 delete mode 100644 build-tools/build-wheels/centos/Dockerfile
 delete mode 100644 build-tools/build-wheels/centos/dev-wheels-py2.cfg
 delete mode 100644 build-tools/build-wheels/centos/dev-wheels.cfg
 delete mode 100644 build-tools/build-wheels/centos/openstack.cfg
 delete mode 100644 build-tools/build-wheels/centos/stable-wheels-py2.cfg
 delete mode 100644 build-tools/build-wheels/centos/stable-wheels.cfg
 delete mode 100755 build-tools/build_guest/build-guest-image.py
 delete mode 100644 build-tools/build_guest/image-rt.inc
 delete mode 100644 build-tools/build_guest/image.inc
 delete mode 100644 build-tools/build_guest/rootfs-exclude.txt
 delete mode 100644 build-tools/build_guest/rootfs-rt/boot/extlinux.conf
 delete mode 100755 build-tools/build_guest/rootfs-setup.sh
 delete mode 100644 build-tools/build_guest/rootfs-std/boot/extlinux.conf
 delete mode 100644 build-tools/build_guest/rootfs/etc/cloud/cloud.cfg.d/99_wrs-datasources.cfg
 delete mode 100644 build-tools/build_guest/rootfs/etc/dhcp/dhclient.conf
 delete mode 100644 build-tools/build_guest/rootfs/etc/iptables.rules
 delete mode 100644 build-tools/build_guest/rootfs/etc/modprobe.d/floppy.conf
 delete mode 100644 build-tools/build_guest/rootfs/etc/modprobe.d/wrs_avp.conf
 delete mode 100644 build-tools/build_guest/rootfs/etc/modules-load.d/wrs_avp.conf
 delete mode 100644 build-tools/build_guest/rootfs/etc/sysconfig/network-scripts/ifcfg-eth0
 delete mode 100644 build-tools/build_guest/rootfs/etc/udev/rules.d/65-renumber-net.rules
 delete mode 100755 build-tools/build_guest/rootfs/usr/lib/udev/renumber_device
 delete mode 100644 build-tools/build_guest/rpm-install-list-rt.txt
 delete mode 100644 build-tools/build_guest/rpm-install-list.txt
 delete mode 100644 build-tools/build_guest/rpm-remove-list.txt
 delete mode 100644 build-tools/build_iso/anaconda-ks.cfg
 delete mode 100755 build-tools/build_iso/cgts_deps.sh
 delete mode 100644 build-tools/build_iso/comps.xml.gz
 delete mode 100755 build-tools/build_iso/gather_packages.pl
 delete mode 100644 build-tools/build_iso/image-dev.inc
 delete mode 100644 build-tools/build_iso/image.inc
 delete mode 100644 build-tools/build_iso/isolinux.cfg
 delete mode 100644 build-tools/build_iso/ks.cfg
 delete mode 100644 build-tools/build_iso/minimal_rpm_list.txt
 delete mode 100644 build-tools/build_iso/openstack_kilo.txt
 delete mode 100644 build-tools/build_minimal_iso/README
 delete mode 100644 build-tools/build_minimal_iso/README.2
 delete mode 100644 build-tools/build_minimal_iso/build.cfg
 delete mode 100755 build-tools/build_minimal_iso/build.sh
 delete mode 100755 build-tools/build_minimal_iso/build_centos.sh
 delete mode 100755 build-tools/build_minimal_iso/cgts_deps.sh
 delete mode 100644 build-tools/build_minimal_iso/yum.conf
 delete mode 100644 build-tools/classify
 delete mode 100755 build-tools/create-yum-conf
 delete mode 100755 build-tools/create_dependancy_cache.py
 delete mode 100755 build-tools/default_build_srpm
 delete mode 100755 build-tools/find_klm
 delete mode 100755 build-tools/find_patched_srpms_needing_upgrade
 delete mode 100755 build-tools/ip_report.py
 delete mode 100755 build-tools/make-installer-images.sh
 delete mode 100644 build-tools/mk/_sign_pkgs.mk
 delete mode 100755 build-tools/mockchain-parallel
 delete mode 100755 build-tools/mockchain-parallel-1.3.4
 delete mode 100755 build-tools/mockchain-parallel-1.4.16
 delete mode 100755 build-tools/mockchain-parallel-2.6
 delete mode 100755 build-tools/mockchain-parallel-2.7
 delete mode 100755 build-tools/modify-build-cfg
 delete mode 100755 build-tools/patch-iso
 delete mode 100755 build-tools/patch_rebase_1
 delete mode 100755 build-tools/patch_rebase_2
 delete mode 100755 build-tools/patch_rebase_3
 delete mode 100755 build-tools/patch_rebase_4
 delete mode 100644 build-tools/repo_files/comps.xml
 delete mode 120000 build-tools/repo_files/mock.cfg.all.proto
 delete mode 100644 build-tools/repo_files/mock.cfg.centos7.all.proto
 delete mode 100644 build-tools/repo_files/mock.cfg.centos7.distro.proto
 delete mode 100644 build-tools/repo_files/mock.cfg.centos7.proto
 delete mode 100644 build-tools/repo_files/mock.cfg.centos8.all.proto
 delete mode 100644 build-tools/repo_files/mock.cfg.centos8.distro.proto
 delete mode 100644 build-tools/repo_files/mock.cfg.centos8.proto
 delete mode 120000 build-tools/repo_files/mock.cfg.distro.proto
 delete mode 120000 build-tools/repo_files/mock.cfg.proto
 delete mode 100755 build-tools/sign-rpms
 delete mode 100755 build-tools/sign-secure-boot
 delete mode 100644 build-tools/source_lookup.txt
 delete mode 100644 build-tools/spec-utils
 delete mode 100644 build-tools/srpm-utils
 delete mode 100644 build-tools/stxRpmUtils.py
 delete mode 100755 build-tools/sync-jenkins
 delete mode 100755 build-tools/sync_jenkins.sh
 delete mode 100644 build-tools/tis.macros
 delete mode 100755 build-tools/update-efiboot-image
 delete mode 100755 build-tools/update-pxe-network-installer

diff --git a/.gitignore b/.gitignore
index 56f0088c..72860aec 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,9 +1,6 @@
 *.swp
 .tox
 __pycache__/
-/centos-repo
-/cgcs-centos-repo
-/cgcs-tis-repo
 /local-build-data
 /local-repo
 /public-keys/
diff --git a/build-data/build_avoidance_source b/build-data/build_avoidance_source
deleted file mode 100644
index 1e371f24..00000000
--- a/build-data/build_avoidance_source
+++ /dev/null
@@ -1,23 +0,0 @@
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-
-#
-# What files and directories need to be copied
-#
-BUILD_AVOIDANCE_SRPM_DIRECTORIES="inputs srpm_assemble rpmbuild/SRPMS rpmbuild/SOURCES"
-BUILD_AVOIDANCE_SRPM_FILES=""
-BUILD_AVOIDANCE_RPM_DIRECTORIES="results rpmbuild/RPMS rpmbuild/SPECS repo/local-repo/dependancy-cache"
-BUILD_AVOIDANCE_RPM_FILES=".platform_release"
-
-
-#
-# Copy the lines below to $MY_REPO/local-build-data/build_avoidance_source,
-# then uncomment and fill in the values giving the location of your local reference build.
-#
-# BUILD_AVOIDANCE_USR="jenkins"
-# BUILD_AVOIDANCE_HOST="machine.corp.com"
-# BUILD_AVOIDANCE_DIR="/localdisk/loadbuild/jenkins/StarlingX_Build"
diff --git a/build-data/unbuilt_rpm_patterns b/build-data/unbuilt_rpm_patterns
deleted file mode 100644
index 35df2a4f..00000000
--- a/build-data/unbuilt_rpm_patterns
+++ /dev/null
@@ -1,22 +0,0 @@
-[-]locale[-]
-[-]doc[-]
-[-]dbg[-]
-vswitch-staticdev
-vim-spell
-openssh-server-sysvinit
-openstack-neutron-linuxbridge
-^libcacard-
-^kernel-bootwrapper
-^kernel-doc-
-^kernel-abi-whitelists
-^kernel-debug-
-^kernel-kdump
-^kernel-rt-bootwrapper
-^kernel-rt-doc-
-^kernel-rt-abi-whitelists
-^kernel-rt-debug-
-^kernel-rt-debuginfo
-^kernel-rt-kdump
-^kernel-rt-cross-headers
-^kernel-rt-kvm-debuginfo
-^kernel-rt-tools-debuginfo
diff --git a/build-tools/Cached_Data.txt b/build-tools/Cached_Data.txt
deleted file mode 100644
index e3a7d1fe..00000000
--- a/build-tools/Cached_Data.txt
+++ /dev/null
@@ -1,78 +0,0 @@
-Data on an source rpm:
-
-   location:
-      ${MY_WORKSPACE}/${BUILD_TYPE}/rpmbuild/SPECS/${SRPM_FILE_NAME}/
-
-      files:
-         *.spec     # spec file found in the source rpm
-
-      subdirectories:
-         NAMES/     # Directory contains an emtpy file, where the file name 
-                    # is the name of the source rpm.
-
-         SERVICES/  # Directory contains zero or more emtpy files, where the 
-                    # file name is the name of the service provided by one 
-                    # or more of the rpms.
-   
-         BUILDS/    # Directory contains emtpy files, where the file name is 
-                    # the name of a binary rpm built from the source rpm.
-
-         BUILDS_VR/ # Directory contains emtpy files, where the file name is 
-                    # the name-verion-release of a binary rpm built from the 
-                    # source rpm.
-
-   location:
-      ${MY_WORKSPACE}/${BUILD_TYPE}/rpmbuild/SOURCES/${SRPM_FILE_NAME}/
-
-      files:
-         BIG        # if it exists, it contains one line, the numeric value 
-                    # extracted from build_srpms.data if the line 
-                    # BUILD_IS_BIG=### if present.  
-                    # This is the estimated filesystem size (GB) required to 
-                    # host a mock build of the package.
-                    # Note: not all parallel build environments are the same 
-                    # size.  The smallest build environmnet is 3 GB and this 
-                    # is sufficient for most packages.  Don't bother adding a 
-                    # BUILD_IS_BIG=### directive unless 3 gb is proven to be
-                    # insufficient.
-
-         SLOW       # if it exists, it contains one line, the numeric value i
-                    # extracted from build_srpms.data if the line 
-                    # BUILD_IS_SLOW=### if present.
-                    # This is the estimated build time (minutes) required to 
-                    # host perform a mock build of the package.
-                    # Note: Currently we only use this value as a boolean. 
-                    # Non-zero and we try to start the build of this package 
-                    # earlier rather than later.  Build times >= 3 minutes are 
-                    # worth anotating.  Else don't bother adding a 
-                    # BUILD_IS_SLOW=### directive
-e.g.
-
-cd $MY_WORKSPACE/std/rpmbuild/SPECS/openstack-cinder-9.1.1-0.tis.40.src.rpm
-find .
-./BUILDS
-./BUILDS/openstack-cinder
-./BUILDS/python-cinder
-./BUILDS/python-cinder-tests
-./NAMES
-./NAMES/openstack-cinder
-./SERVICES
-./SERVICES/cinder
-./BUILDS_VR
-./BUILDS_VR/openstack-cinder-9.1.1-0.tis.40
-./BUILDS_VR/python-cinder-9.1.1-0.tis.40
-./BUILDS_VR/python-cinder-tests-9.1.1-0.tis.40
-./openstack-cinder.spec
-
-
-e.g.
-cd $MY_WORKSPACE/std/rpmbuild/SOURCES/kernel-3.10.0-514.16.1.el7.29.tis.src.rpm
-find .
-./BIG
-./SLOW
-
-cat ./BIG
-8
-
-cat ./SLOW
-12
diff --git a/build-tools/audit-pkgs b/build-tools/audit-pkgs
deleted file mode 100755
index 370c9b0c..00000000
--- a/build-tools/audit-pkgs
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/bin/bash
-
-rpm_compare () {
-   local r="$1"
-   local r2="$2"
-   local line
-   local f=$(basename $r)
-   local f2=$(basename $r2)
-
-   rpm -q --dump --nosignature -p $r  | awk ' { print $1 "\n" $1 " " $5 " " $6 " " $7 " " $8 " " $9 " " $10 " " $11 } ' > /tmp/dump.new
-   rpm -q --dump --nosignature -p $r2 | awk ' { print $1 "\n" $1 " " $5 " " $6 " " $7 " " $8 " " $9 " " $10 " " $11 } ' > /tmp/dump.old
-   first_line=1
-   diff -y -W 200 --suppress-common-lines /tmp/dump.new /tmp/dump.old | grep '|' |
-   while read -r line; do
-      left=$(echo "$line" | awk -F '|' '{ print $1 }')
-      right=$(echo "$line" | awk -F '|' '{ print $2 }')
-      left_f=$(echo "$left" | awk '{ print $1 }')
-      right_f=$(echo "$right" | awk '{ print $1 }')
-      if [ "$left_f" != "$right_f" ];then
-         continue
-      fi
-      if  [ $first_line -eq 1 ]; then
-         echo ""
-         echo "$f   vs   $f2"
-         first_line=0
-      fi
-      echo "$line"
-   done
-}
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-echo ""
-echo "======================================================"
-echo "Auditing built packages vs unpatched upstream packages"
-echo "======================================================"
-for r in $(find $MY_WORKSPACE/*/rpmbuild/RPMS -name '*.rpm' | grep -v '.src.rpm' | grep -v debuginfo); do
-   f=$(basename $r)
-   f2=$(echo $f | sed 's#[.]tis[.][0-9]*[.]#.#' | sed 's#[.]tis[.]#.#')
-   r2=$(find ${CENTOS_REPO}/Binary/ -name $f2)
-   if [ "$r2" == "" ]; then
-      # Probably one of our own
-      # echo "Couldn't find '$f2'"
-      continue
-   fi
-   rpm_compare "$r" "$r2"
-done
-
-echo ""
-echo "============================"
-echo "Auditing built for conflicts"
-echo "============================"
-grep 'conflicts with file from package' -r --binary-files=without-match $MY_WORKSPACE/*/results/ |
-
-while read -r line; do
-   w=$(echo "$line" | awk '{ print $8 }')".rpm"
-   w2=$(echo "$line" | awk '{ print $14 }')".rpm"
-   echo "$w $w2"
-done | sort --unique | sed 's#bash-completion-1:#bash-completion-#' |
-
-while read -r line2; do
-   f=$(echo "$line2" | awk '{ print $1 }')
-   f2=$(echo "$line2" | awk '{ print $2 }')
-   r=$(find ${CENTOS_REPO}/Binary/ $MY_WORKSPACE/*/rpmbuild/RPMS -name $f)
-   r2=$(find ${CENTOS_REPO}/Binary/ $MY_WORKSPACE/*/rpmbuild/RPMS -name $f2)
-   # echo ""
-   # echo "$f   vs   $f2"
-   # echo "$r   vs   $r2"
-   if [ "$r" != "" ] && [ "$r2" != "" ]; then
-      rpm_compare "$r" "$r2"
-   fi
-done
diff --git a/build-tools/build-avoidance-utils.sh b/build-tools/build-avoidance-utils.sh
deleted file mode 100644
index 5ac5858d..00000000
--- a/build-tools/build-avoidance-utils.sh
+++ /dev/null
@@ -1,923 +0,0 @@
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Functions related to build avoidance.
-#
-# Do not call directly.  Used by build-pkgs.
-#
-# Build avoidance downloads rpm, src.rpm and other artifacts of
-# build-pkgs for a local reference build.  The reference would
-# typically be an automated build run atleast daily.
-# The MY_WORKSPACE directory for the reference build shall have
-# a common root directory, and a leaf directory that is a time stamp
-# in a sortable parsable format.   Default YYYYMMDDThhmmssZ.
-#  e.g. /localdisk/loadbuild/jenkins/StarlingX/20180719T113021Z
-#
-# Other formats can be used by setting the following variables
-# in $MY_REPO/local-build-data/build_avoidance_source.
-#   e.g. to allow format YYYY-MM-DD_hh-mm-ss
-# BUILD_AVOIDANCE_DATE_FORMAT="%Y-%m-%d"
-# BUILD_AVOIDANCE_TIME_FORMAT="%H-%M-%S"
-# BUILD_AVOIDANCE_DATE_TIME_DELIM="_"
-# BUILD_AVOIDANCE_DATE_TIME_POSTFIX=""
-#
-# Note: Must be able to rsync and ssh to the machine that holds the
-# reference builds.
-#
-# In future alternative transfer protocols may be supported.
-# Select the alternate protocol by setting the following variables
-# in $MY_REPO/local-build-data/build_avoidance_source.
-# e.g.
-# BUILD_AVOIDANCE_FILE_TRANSFER="my-supported-prototcol"
-#
-
-BUILD_AVOIDANCE_UTILS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-source "${BUILD_AVOIDANCE_UTILS_DIR}/git-utils.sh"
-
-BUILD_AVOIDANCE_USR=""
-BUILD_AVOIDANCE_HOST=""
-BUILD_AVOIDANCE_DIR=""
-BUILD_AVOIDANCE_URL=""
-
-# Default date/time format, iso-8601 compact, 20180912T143913Z
-# Syntax is a subset of that use by the unix 'date' command.
-BUILD_AVOIDANCE_DATE_FORMAT="%Y%m%d"
-BUILD_AVOIDANCE_TIME_FORMAT="%H%M%S"
-BUILD_AVOIDANCE_DATE_TIME_DELIM="T"
-BUILD_AVOIDANCE_DATE_TIME_POSTFIX="Z"
-
-# Default file transfer method
-BUILD_AVOIDANCE_FILE_TRANSFER="rsync"
-
-# Default is to use timestamps and days in UTC
-#
-# If you prefer local time, then set 'BUILD_AVOIDANCE_DATE_UTC=0'
-# in '$MY_REPO/local-build-data/build_avoidance_source'
-BUILD_AVOIDANCE_DATE_UTC=1
-
-BUILD_AVOIDANCE_DATA_DIR="$MY_WORKSPACE/build_avoidance_data"
-BUILD_AVOIDANCE_SOURCE="$MY_REPO/build-data/build_avoidance_source"
-BUILD_AVOIDANCE_LOCAL_SOURCE="$MY_REPO/local-build-data/build_avoidance_source"
-BUILD_AVOIDANCE_TEST_CONTEXT="$BUILD_AVOIDANCE_DATA_DIR/test_context"
-
-if [ ! -f $BUILD_AVOIDANCE_SOURCE ]; then
-    echo "Couldn't read $BUILD_AVOIDANCE_SOURCE"
-    exit 1
-fi
-
-echo "Reading: $BUILD_AVOIDANCE_SOURCE"
-source $BUILD_AVOIDANCE_SOURCE
-
-if [ -f $BUILD_AVOIDANCE_LOCAL_SOURCE ]; then
-    echo "Reading: $BUILD_AVOIDANCE_LOCAL_SOURCE"
-    source $BUILD_AVOIDANCE_LOCAL_SOURCE
-fi
-
-UTC=""
-
-if [ $BUILD_AVOIDANCE_DATE_UTC -eq 1 ]; then
-    UTC="--utc"
-fi
-
-
-if [ "x$BUILD_AVOIDANCE_OVERRIDE_DIR" != "x" ]; then
-    BUILD_AVOIDANCE_DIR="$BUILD_AVOIDANCE_OVERRIDE_DIR"
-fi
-
-if [ "x$BUILD_AVOIDANCE_OVERRIDE_HOST" != "x" ]; then
-    BUILD_AVOIDANCE_HOST="$BUILD_AVOIDANCE_OVERRIDE_HOST"
-fi
-
-if [ "x$BUILD_AVOIDANCE_OVERRIDE_USR" != "x" ]; then
-    BUILD_AVOIDANCE_USR="$BUILD_AVOIDANCE_OVERRIDE_USR"
-fi
-
-echo "BUILD_AVOIDANCE_DIR=$BUILD_AVOIDANCE_DIR"
-echo "BUILD_AVOIDANCE_HOST=$BUILD_AVOIDANCE_HOST"
-echo "BUILD_AVOIDANCE_USR=$BUILD_AVOIDANCE_USR"
-
-build_avoidance_last_sync_file () {
-    local BUILD_TYPE=$1
-
-    if [ -z "$BUILD_TYPE" ]; then
-        echo "build_avoidance_last_sync_file: Build type not set"
-        exit 1
-    fi
-    echo "$BUILD_AVOIDANCE_DATA_DIR/$BUILD_TYPE/last_sync_context"
-}
-
-build_avoidance_clean () {
-    local BUILD_TYPE=$1
-    local lsf
-
-    if [ "$BUILD_TYPE" == "" ]; then
-        for lsf in $(find $BUILD_AVOIDANCE_DATA_DIR -name last_sync_context); do
-            \rm -f -v "$lsf"
-        done
-    else
-        lsf="$(build_avoidance_last_sync_file $BUILD_TYPE)"
-        if [ -f $lsf ]; then
-            \rm -f -v "$lsf"
-        fi
-    fi
-}
-
-
-date_to_iso_8601 () {
-    local DATE="$1"
-    local CENTURY=""
-    local YEAR_IN_CENTURY="00"
-    local MONTH="01"
-    local DAY="01"
-    local DAY_OF_YEAR=""
-
-    CENTURY="$(date  '+%C')"
-
-    for x in $(echo "${BUILD_AVOIDANCE_DATE_FORMAT}" | tr ' ' '#' | sed 's/%%/#/g' | tr '%' ' ' ); do
-        # Consume format case options
-        case ${x:0:1} in
-            ^) x=${x:1};;
-            \#) x=${x:1};;
-            *) ;;
-        esac
-
-        # Process format
-        case $x in
-            Y*)  CENTURY=${DATE:0:2}; YEAR_IN_CENTURY=${DATE:2:2}; DATE=${DATE:4}; x=${x:1};;
-            0Y*) CENTURY=${DATE:0:2}; YEAR_IN_CENTURY=${DATE:2:2}; DATE=${DATE:4}; x=${x:2};;
-            _Y*) CENTURY=$(echo "${DATE:0:2}" | tr ' ' '0'); YEAR_IN_CENTURY=${DATE:2:2}; DATE=${DATE:4}; x=${x:2};;
-
-            y*)  YEAR_IN_CENTURY=${DATE:0:2}; DATE=${DATE:2}; x=${x:1};;
-            0y*) YEAR_IN_CENTURY=${DATE:0:2}; DATE=${DATE:2}; x=${x:2};;
-            _y*) YEAR_IN_CENTURY=$(echo "${DATE:0:2}" | tr ' ' '0'); DATE=${DATE:2}; x=${x:2};;
-
-            C*)  CENTURY=${DATE:0:2}; DATE=${DATE:2}; x=${x:1};;
-            0C*) CENTURY=${DATE:0:2}; DATE=${DATE:2}; x=${x:2};;
-            _C*) CENTURY=$(echo "${DATE:0:2}" | tr ' ' '0'); DATE=${DATE:2}; x=${x:2};;
-
-            m*)  MONTH=${DATE:0:2}; DATE=${DATE:2}; x=${x:1};;
-            0m*) MONTH=${DATE:0:2}; DATE=${DATE:2}; x=${x:2};;
-            _m*) MONTH=$(echo "${DATE:0:2}" | tr ' ' '0'); DATE=${DATE:2}; x=${x:2};;
-            e*)  MONTH=$(echo "${DATE:0:2}" | tr ' ' '0'); DATE=${DATE:2}; x=${x:1};;
-            0e*) MONTH=${DATE:0:2}; DATE=${DATE:2}; x=${x:2};;
-            _e*) MONTH=$(echo "${DATE:0:2}" | tr ' ' '0'); DATE=${DATE:2}; x=${x:2};;
-            b*)  MONTH="$(date -d "${DATE:0:3} 1 2000" '+%m')"; DATE=${DATE:3}; x=${x:1};;
-            h*)  MONTH="$(date -d "${DATE:0:3} 1 2000" '+%m')"; DATE=${DATE:3}; x=${x:1};;
-
-            d*)  DAY=${DATE:0:2}; DATE=${DATE:2}; x=${x:1};;
-            0d*) DAY=${DATE:0:2}; DATE=${DATE:2}; x=${x:2};;
-            _d*) DAY=$(echo "${DATE:0:2}" | tr ' ' '0'); DATE=${DATE:2}; x=${x:2};;
-
-            j*)  DAY_OF_YEAR=${DATE:0:3}; DATE=${DATE:3}; x=${x:1};;
-            0j*) DAY_OF_YEAR=${DATE:0:3}; DATE=${DATE:3}; x=${x:2};;
-            _j*) DAY_OF_YEAR=$(echo "${DATE:0:3}" | tr ' ' '0'); DATE=${DATE:3}; x=${x:2};;
-
-            D*) MONTH=${DATE:0:2}; DAY=${DATE:3:2}; YEAR_IN_CENTURY=${DATE:6:2}; DATE=${DATE:8}; x=${x:1};;
-            F*) CENTURY=${DATE:0:2}; YEAR_IN_CENTURY=${DATE:2:2}; MONTH=${DATE:5:2}; DAY=${DATE:8:2}; DATE=${DATE:10}; x=${x:1};;
-            *) >&2 echo "$FUNCNAME (${LINENO}): Unsupported date format: ${BUILD_AVOIDANCE_DATE_FORMAT}"; return 1;;
-        esac
-
-        # consume remaing non-interpreted content
-        if [ "$(echo "${DATE:0:${#x}}" |  tr ' ' '#')" != "${x}" ]; then
-            >&2 echo "$FUNCNAME (${LINENO}): Unexpected content '${DATE:0:${#x}}' does not match expected '${x}': '$1' being parsed vs '${BUILD_AVOIDANCE_DATE_FORMAT}'"
-            return 1
-        fi
-        DATE=${DATE:${#x}}
-    done
-
-    if [ "${DAY_OF_YEAR}" != "" ]; then
-        local YEAR_SEC
-        local DOY_SEC
-        YEAR_SEC="$(date -d "${CENTURY}${YEAR_IN_CENTURY}-01-01" '+%s')"
-        DOY_SEC=$((YEAR_SEC+(DAY_OF_YEAR-1)*24*60*60))
-        MONTH="$(date "@$DOY_SEC" "+%m")"
-        DAY="$(date "@$DOY_SEC" "+%d")"
-    fi
-
-    echo "${CENTURY}${YEAR_IN_CENTURY}-${MONTH}-${DAY}"
-    return 0
-}
-
-time_to_iso_8601 () {
-    TIME="$1"
-    local HOUR="00"
-    local H12=""
-    local AMPM=""
-    local MINUTE="00"
-    local SECOND="00"
-
-    CENTURY="$(date  '+%C')"
-
-    for x in $(echo "${BUILD_AVOIDANCE_TIME_FORMAT}" | tr ' ' '#' | sed 's/%%/#/g' | tr '%' ' ' ); do
-        # Consume format case options
-        case ${x:0:1} in
-            ^) x=${x:1};;
-            \#) x=${x:1};;
-            *) ;;
-        esac
-
-        # Process format
-        case $x in
-            H*)  HOUR=${TIME:0:2}; TIME=${TIME:2}; x=${x:1};;
-            0H*) HOUR=${TIME:0:2}; TIME=${TIME:2}; x=${x:2};;
-            _H*) HOUR="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:2};;
-            k*)  HOUR="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:1};;
-            0k*) HOUR=${TIME:0:2}; TIME=${TIME:2}; x=${x:2};;
-            _k*) HOUR="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:2};;
-
-            I*)  H12=${TIME:0:2}; TIME=${TIME:2}; x=${x:1};;
-            0I*) H12=${TIME:0:2}; TIME=${TIME:2}; x=${x:2};;
-            _I*) H12="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:2};;
-            l*)  H12="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:1};;
-            0l*) H12=${TIME:0:2}; TIME=${TIME:2}; x=${x:2};;
-            _l*) H12="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:2};;
-            p*) AMPM=${TIME:0:2}; TIME=${TIME:2}; x=${x:1};;
-
-            M*)  MINUTE=${TIME:0:2}; TIME=${TIME:2}; x=${x:1};;
-            0M*) MINUTE=${TIME:0:2}; TIME=${TIME:2}; x=${x:2};;
-            _M*) MINUTE="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:2};;
-
-            S*)  SECOND=${TIME:0:2}; TIME=${TIME:2}; x=${x:1};;
-            0S*) SECOND=${TIME:0:2}; TIME=${TIME:2}; x=${x:2};;
-            _S*) SECOND="$(echo "${TIME:0:2}" | tr ' ' '0')"; TIME=${TIME:2}; x=${x:2};;
-
-            R*) HOUR=${TIME:0:2}; MINUTE=${TIME:3:2} TIME=${TIME:5}; x=${x:1};;
-            r*) H12=${TIME:0:2}; MINUTE=${TIME:3:2}; SECOND=${TIME:6:2}; AMPM=${TIME:9:2}; TIME=${TIME:11}; x=${x:1};;
-            T*) HOUR=${TIME:0:2}; MINUTE=${TIME:3:2}; SECOND=${TIME:6:2}; TIME=${TIME:8}; x=${x:1};;
-
-            *) >&2 echo "$FUNCNAME (${LINENO}): Unsupported time format: ${BUILD_AVOIDANCE_TIME_FORMAT}"; return 1;;
-        esac
-
-        # consume remaing non-interpreted content
-        if [ "$(echo "${TIME:0:${#x}}" |  tr ' ' '#')" != "${x}" ]; then
-            >&2 echo "$FUNCNAME (${LINENO}): Unexpected content '${TIME:0:${#x}}' does not match expected '${x}': '$1' being parsed vs '${BUILD_AVOIDANCE_TIME_FORMAT}'"
-            return 1
-        fi
-        TIME=${TIME:${#x}}
-    done
-
-    if [ "$H12" != "" ] && [ "$AMPM" != "" ]; then
-        HOUR="$(date "$H12:01:01 $AMPM" '+%H')"
-    else
-        if [ "$H12" != "" ] && [ "$AMPM" != "" ]; then
-            >&2 echo "$FUNCNAME (${LINENO}): Unsupported time format: ${BUILD_AVOIDANCE_TIME_FORMAT}"
-            return 1
-        fi
-    fi
-
-    echo "${HOUR}:${MINUTE}:${SECOND}"
-    return 0
-}
-
-date_time_to_iso_8601 () {
-    local DATE_TIME="$1"
-    local DATE
-    local TIME
-    local DECODED_DATE
-    local DECODED_TIME
-    DATE=$(echo "${DATE_TIME}" | cut -d ${BUILD_AVOIDANCE_DATE_TIME_DELIM} -f 1)
-    TIME=$(echo "${DATE_TIME}" | cut -d ${BUILD_AVOIDANCE_DATE_TIME_DELIM} -f 2 | sed "s#${BUILD_AVOIDANCE_DATE_TIME_POSTFIX}\$##")
-    DECODED_DATE=$(date_to_iso_8601 "${DATE}")
-    DECODED_TIME=$(time_to_iso_8601 "${TIME}")
-    echo "${DECODED_DATE}T${DECODED_TIME}$(date $UTC '+%:z')"
-}
-
-#
-# test_build_avoidance_context <path-to-context-file>
-#
-# Is the provided context file compatible with the current
-# state of all of our gits?  A compatible context is one
-# where every commit in the context file is visible in our
-# current git history.
-#
-# Returns: Timestamp of context tested.
-# Exit code: 0 = Compatible
-#            1 = This context is older than the last applied
-#                build avoidance context.  If you are searching
-#                newest to oldest, you might as well stop.
-#            2 = Not compatible
-#
-test_build_avoidance_context () {
-    local context="$1"
-    local BA_LAST_SYNC_CONTEXT="$2"
-    local BA_CONTEXT=""
-
-    BA_CONTEXT=$(basename $context | cut -d '.' -f 1)
-    >&2 echo "test: $BA_CONTEXT"
-
-    if [ "$BA_CONTEXT" == "$BA_LAST_SYNC_CONTEXT" ]; then
-        # Stop the search.  We've reached the last sync point
-        BA_CONTEXT=""
-        echo "$BA_CONTEXT"
-        return 1
-    fi
-
-    git_test_context "$context"
-    result=$?
-    if [ $result -eq 0 ]; then
-        # found a new context !!!
-        echo "$BA_CONTEXT"
-        return 0
-    fi
-
-    # Continue the search
-    BA_CONTEXT=""
-    echo "$BA_CONTEXT"
-    return 2
-}
-
-
-#
-# get_build_avoidance_context
-#
-# Return URL of the most recent jenkins build that is compatable with
-# the current software context under $MY_REPO.
-#
-get_build_avoidance_context () {
-    (
-    local BUILD_TYPE=$1
-    local context
-    local BA_CONTEXT=""
-    local BA_LAST_SYNC_CONTEXT=""
-
-    export BUILD_AVOIDANCE_LAST_SYNC_FILE="$(build_avoidance_last_sync_file $BUILD_TYPE)"
-    mkdir -p "$(dirname $BUILD_AVOIDANCE_LAST_SYNC_FILE)"
-
-    # Load last synced context
-    if [ -f $BUILD_AVOIDANCE_LAST_SYNC_FILE ]; then
-        BA_LAST_SYNC_CONTEXT=$(head -n 1 $BUILD_AVOIDANCE_LAST_SYNC_FILE)
-    fi
-
-    mkdir -p $BUILD_AVOIDANCE_DATA_DIR
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): mkdir -p $BUILD_AVOIDANCE_DATA_DIR"
-        return 1
-    fi
-
-    local REMOTE_CTX_DIR="context"
-    local LOCAL_CTX_DIR="$BUILD_AVOIDANCE_DATA_DIR/context"
-
-    # First copy the directory containing all the context files for
-    # the reference builds.
-    >&2 echo "Download latest reference build contexts"
-
-    # Must set this prior to build_avoidance_copy_dir.
-    # The setting is not exported outside of the subshell.
-    if [ -z "$BUILD_AVOIDANCE_HOST" ]; then
-        BUILD_AVOIDANCE_URL="$BUILD_AVOIDANCE_DIR"
-    else
-        BUILD_AVOIDANCE_URL="$BUILD_AVOIDANCE_HOST:$BUILD_AVOIDANCE_DIR"
-    fi
-
-
-    build_avoidance_copy_dir "$REMOTE_CTX_DIR" "$LOCAL_CTX_DIR"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): build_avoidance_copy_dir '$REMOTE_CTX_DIR' '$LOCAL_CTX_DIR'"
-        return 1
-    fi
-
-    # Search for a new context to sync
-    cd $MY_REPO
-
-    if [ "$BUILD_AVOIDANCE_DAY" == "" ]; then
-        # Normal case:
-        # Search all contexts, newest to oldest, for a good context.
-        for context in $(ls -1rd $LOCAL_CTX_DIR/*context); do
-            >&2 echo "context=$context"
-            BA_CONTEXT=$(test_build_avoidance_context $context $BA_LAST_SYNC_CONTEXT)
-            if [ $? -le 1 ]; then
-                # Stop search.  Might or might not have found a good context.
-                break;
-            fi
-        done
-    else
-        # Special case when a target day is specified.  Why would we do this?
-        # Reason is we might want the reference build to itself use build
-        # avoidance referencing prior builds of itself, except for one build
-        # a week when we use a full build rather than a build avoidance build.
-        #    e.g.   Sunday - full build
-        #           Mon-Sat - avoidance builds that refernce Sunday build.
-        #
-        # Starting from last <TARG_DAY> (e.g. "Sunday"), search newest to
-        # oldest for a good context.  If none found, increment the target
-        # day (e.g. Monday) and search again.  Keep incrementing until a
-        # good build is found, or target day + offset days would be a date
-        # in the furure.
-        #
-        local TARG_DAY=$BUILD_AVOIDANCE_DAY
-        local TODAY_DATE
-        local TODAY_DAY
-        local TARG_DATE=""
-        local TARG_TS
-        local TODAY_TS
-
-        TODAY_DATE=$(date  $UTC +%Y-%m-%d)
-        TODAY_DAY=$(date $UTC "+%A")
-
-        for OFFSET_DAYS in 0 1 2 3 4 5 6; do
-            if [ "$TARG_DAY" != "" ]; then
-                # Convert TARG_DAY+OFFSET_DAYS to TARG_DATE
-
-                if [ "$TODAY_DAY" == "$TARG_DAY" ]; then
-                    TARG_DATE=$(date $UTC -d"$TARG_DAY+$OFFSET_DAYS days" +%Y-%m-%d)
-                else
-                    TARG_DATE=$(date $UTC -d"last-$TARG_DAY+$OFFSET_DAYS days" +%Y-%m-%d)
-                fi
-                >&2 echo "TARG_DATE=$TARG_DATE"
-
-                TARG_TS=$(date $UTC -d "$TARG_DATE" +%s)
-                TODAY_TS=$(date $UTC -d "$TODAY_DATE" +%s)
-                if [ $TARG_TS -gt $TODAY_TS ]; then
-                    # Skip if offset has pushed us into future dates
-                    continue;
-                fi
-
-                if [ "$TARG_DATE" == "$TODAY_DATE" ]; then
-                    TARG_DATE=""
-                fi
-            fi
-
-            # Search build, newest to oldest, satisfying TARG_DATE
-            for f in $(ls -1rd $LOCAL_CTX_DIR/*context); do
-                DATE=$(date_to_iso_8601 $(basename "$f"))
-                if [ $? -ne 0 ]; then
-                    >&2 echo "Failed to extract date from filename '$(basename "$f")', ignoring file"
-                    continue
-                fi
-
-                >&2 echo "   DATE=$DATE, TARG_DATE=$TARG_DATE"
-
-                if [ "$DATE" == "$TARG_DATE" ] || [ "$TARG_DATE" == "" ] ; then
-                    context=$f;
-                else
-                    continue
-                fi
-
-                >&2 echo "context=$context"
-
-                BA_CONTEXT=$(test_build_avoidance_context $context $BA_LAST_SYNC_CONTEXT)
-
-                if [ $? -le 1 ]; then
-                    # Stop search.  Might or might not have found a good context.
-                    break;
-                fi
-            done
-
-            if [ "$BA_CONTEXT" != "" ]; then
-                # Found a good context.
-                break
-            fi
-        done
-    fi
-
-    if [ "$BA_CONTEXT" == "" ]; then
-        # No new context found
-        return 1
-    fi
-
-    # test that the reference build context hasn't been deleted
-    local BA_CONTEXT_DIR="$BUILD_AVOIDANCE_DIR/$BA_CONTEXT"
-
-    if [ -z "$BUILD_AVOIDANCE_HOST" ]; then
-        >&2 echo "[ -d $BA_CONTEXT_DIR ]"
-        if ! [ -d $BA_CONTEXT_DIR ] ; then
-            return 1
-        fi
-    else
-        >&2 echo "ssh $BUILD_AVOIDANCE_HOST '[ -d $BA_CONTEXT_DIR ]'"
-        if ! ssh $BUILD_AVOIDANCE_HOST '[ -d $BA_CONTEXT_DIR ]' ; then
-            return 1
-        fi
-    fi
-
-    # Save the latest context
-    >&2 echo "BA_CONTEXT=$BA_CONTEXT"
-    >&2 echo "BUILD_AVOIDANCE_LAST_SYNC_FILE=$BUILD_AVOIDANCE_LAST_SYNC_FILE"
-    echo $BA_CONTEXT > $BUILD_AVOIDANCE_LAST_SYNC_FILE
-
-    # The location of the load with the most compatable new context
-    if [ -z "$BUILD_AVOIDANCE_HOST" ]; then
-        URL=$BA_CONTEXT_DIR
-    else
-        URL=$BUILD_AVOIDANCE_HOST:$BA_CONTEXT_DIR
-    fi
-
-    # return URL to caller.
-    echo $URL
-    return 0
-    )
-}
-
-
-#
-# build_avoidance_pre_clean <build-type>
-#
-# A place for any cleanup actions that must preceed a build avoidance build.
-#
-build_avoidance_pre_clean () {
-    local BUILD_TYPE="$1"
-
-    if [ "$BUILD_TYPE" == "" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): BUILD_TYPE required"
-        return 1
-    fi
-
-    # clean prior builds
-    if [ -d $MY_WORKSPACE/$BUILD_TYPE ]; then
-        build-pkgs --clean --$BUILD_TYPE --no-build-avoidance
-        if [ $? -ne 0 ]; then
-            return 1
-        fi
-    fi
-
-    for f in $BUILD_AVOIDANCE_SRPM_FILES $BUILD_AVOIDANCE_RPM_FILES; do
-        if [ -f $MY_WORKSPACE/$BUILD_TYPE/$f ]; then
-            \rm -f $MY_WORKSPACE/$BUILD_TYPE/$f
-            if [ $? -ne 0 ]; then
-                >&2 echo "Error: $FUNCNAME (${LINENO}): rm -f $MY_WORKSPACE/$BUILD_TYPE/$f"
-                return 1
-            fi
-        fi
-    done
-
-    for d in $BUILD_AVOIDANCE_SRPM_DIRECTORIES $BUILD_AVOIDANCE_RPM_DIRECTORIES; do
-
-        if [ -d $MY_WORKSPACE/$BUILD_TYPE/$d ]; then
-            \rm -rf $MY_WORKSPACE/$BUILD_TYPE/$d
-            if [ $? -ne 0 ]; then
-                >&2 echo "Error: $FUNCNAME (${LINENO}): rm -rf $MY_WORKSPACE/$BUILD_TYPE/$d"
-                return 1
-            fi
-        fi
-    done
-
-    return 0
-}
-
-
-#
-# build_avoidance_copy_dir_rsync <remote-dir-path-rel> <local-dir-path> ['verbose']
-#
-# Copy a file from $BUILD_AVOIDANCE_URL/<remote-dir-path-rel>
-# to <local-dir-path> using rsync.
-#
-build_avoidance_copy_dir_rsync () {
-    local FROM="$1"
-    local TO="$2"
-    local VERBOSE="$3"
-    local FLAGS="-a -u"
-
-    if [ "$BUILD_AVOIDANCE_URL" == "" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): BUILD_AVOIDANCE_URL no set"
-        return 1
-    fi
-
-    if [ "$VERBOSE" != "" ]; then
-        FLAGS="$FLAGS -v"
-        echo "rsync $FLAGS '$BUILD_AVOIDANCE_URL/$FROM/' '$TO/'"
-    fi
-
-    rsync $FLAGS "$BUILD_AVOIDANCE_URL/$FROM/" "$TO/"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): command failed: rsync $FLAGS '$BUILD_AVOIDANCE_URL/$FROM/' '$TO/'"
-        return 1
-    fi
-
-    chmod -R 'ug+w' "$TO/"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): command failed: chmod -R 'ug+w' '$TO/'"
-        return 1
-    fi
-    return 0
-}
-
-#
-# build_avoidance_copy_file_rsync <remote-file-path-rel> <local-file-path> ['verbose']
-#
-# Copy a file from $BUILD_AVOIDANCE_URL/<remote-file-path-rel>
-# to <local-file-path> using rsync.
-#
-build_avoidance_copy_file_rsync () {
-    local FROM="$1"
-    local TO="$2"
-    local VERBOSE="$3"
-    local FLAGS="-a -u"
-
-    if [ "$BUILD_AVOIDANCE_URL" == "" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): BUILD_AVOIDANCE_URL no set"
-        return 1
-    fi
-    if [ "$VERBOSE" != "" ]; then
-        FLAGS="$FLAGS -v"
-        echo "rsync $FLAGS '$BUILD_AVOIDANCE_URL/$FROM' '$TO'"
-    fi
-
-    rsync $FLAGS "$BUILD_AVOIDANCE_URL/$FROM" "$TO"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): command failed: rsync $FLAGS '$BUILD_AVOIDANCE_URL/$FROM' '$TO'"
-        return 1
-    fi
-
-    chmod -R 'ug+w' "$TO"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): command failed: chmod -R 'ug+w' '$TO'"
-        return 1
-    fi
-    return $?
-}
-
-#
-# build_avoidance_copy_dir <remote-dir-path-rel> <local-dir-path> ['verbose']
-#
-# Copy a file from $BUILD_AVOIDANCE_URL/<remote-dir-path-rel>
-# to <local-dir-path>.  The copy method will be determined by
-# BUILD_AVOIDANCE_FILE_TRANSFER.  Only 'rsync' is supported at present.
-#
-# <local-dir-path> should be a directory,
-# mkdir -p will be called on <local-file-path>.
-#
-build_avoidance_copy_dir () {
-    local FROM="$1"
-    local TO="$2"
-    local VERBOSE="$3"
-
-    if [ "$VERBOSE" != "" ]; then
-        echo "mkdir -p '$TO'"
-    fi
-    mkdir -p "$TO"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): mkdir -p $TO"
-        return 1
-    fi
-
-    case ${BUILD_AVOIDANCE_FILE_TRANSFER} in
-        rsync)
-            build_avoidance_copy_dir_rsync "$FROM" "$TO" "$VERBOSE"
-            return $?
-            ;;
-        *)
-            >&2 echo "Error: $FUNCNAME (${LINENO}): Unknown BUILD_AVOIDANCE_FILE_TRANSFER '${BUILD_AVOIDANCE_FILE_TRANSFER}'"
-            return 1
-            ;;
-    esac
-    return 1
-}
-
-#
-# build_avoidance_copy_file <remote-file-path-rel> <local-file-path> ['verbose']
-#
-# Copy a file from $BUILD_AVOIDANCE_URL/<remote-file-path-rel>
-# to <local-file-path>.  The copy method will be determined by
-# BUILD_AVOIDANCE_FILE_TRANSFER.  Only 'rsync' is supported at present.
-#
-# <local-file-path> should be a file, not a directory,
-# mkdir -p will be called on $(dirname <local-file-path>)
-#
-build_avoidance_copy_file () {
-    local FROM="$1"
-    local TO="$2"
-    local VERBOSE="$3"
-
-    if [ "$VERBOSE" != "" ]; then
-        echo "mkdir -p $(dirname '$TO')"
-    fi
-    mkdir -p "$(dirname "$TO")"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): mkdir -p $(dirname "$TO")"
-        return 1
-    fi
-
-    case ${BUILD_AVOIDANCE_FILE_TRANSFER} in
-        rsync)
-            build_avoidance_copy_file_rsync "$FROM" "$TO" "$VERBOSE"
-            return $?
-            ;;
-        *)
-            >&2 echo "Error: $FUNCNAME (${LINENO}): Unknown BUILD_AVOIDANCE_FILE_TRANSFER '${BUILD_AVOIDANCE_FILE_TRANSFER}'"
-            return 1
-            ;;
-    esac
-    return 1
-}
-
-#
-# build_avoidance_copy <build-type> ['verbose']
-#
-# Copy the needed build artifacts for <build-type> from $BUILD_AVOIDANCE_URL.
-#
-build_avoidance_copy () {
-    local BUILD_TYPE="$1"
-    local VERBOSE="$2"
-
-    if [ "$BUILD_TYPE" == "" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): BUILD_TYPE required"
-        return 1
-    fi
-
-    # Iterate through list of directories to copy
-    for d in $BUILD_AVOIDANCE_SRPM_DIRECTORIES $BUILD_AVOIDANCE_RPM_DIRECTORIES; do
-        build_avoidance_copy_dir "$BUILD_TYPE/$d" "$MY_WORKSPACE/$BUILD_TYPE/$d" "$VERBOSE"
-        if [ $? -ne 0 ]; then
-            >&2 echo "Error: $FUNCNAME (${LINENO}): build_avoidance_copy_dir '$BUILD_TYPE/$d' '$MY_WORKSPACE/$BUILD_TYPE/$d'"
-            return 1
-        fi
-    done
-
-    # Iterate through list of files to copy
-    for f in $BUILD_AVOIDANCE_SRPM_FILES $BUILD_AVOIDANCE_RPM_FILES; do
-        build_avoidance_copy_file "$BUILD_TYPE/$f" "$MY_WORKSPACE/$BUILD_TYPE/$f" "$VERBOSE"
-        if [ $? -ne 0 ]; then
-            >&2 echo "Error: $FUNCNAME (${LINENO}): build_avoidance_copy_file '$BUILD_TYPE/$f' '$MY_WORKSPACE/$BUILD_TYPE/$f'"
-            return 1
-        fi
-    done
-
-    return 0
-}
-
-#
-# build_avoidance_fixups <build-type>
-#
-# Fix paths in the build artifacts that we coppied that contain
-# the user name.
-#
-# Also, our credentials may differ from the reference build,
-# so substitute unsigned packages in place of signed packages.
-#
-build_avoidance_fixups () {
-    local BUILD_TYPE="$1"
-
-    local BA_SOURCE_BUILD_ENVIRONMENT
-    BA_SOURCE_BUILD_ENVIRONMENT="${BUILD_AVOIDANCE_USR}-$(basename $(dirname $BUILD_AVOIDANCE_URL))-$(basename $BUILD_AVOIDANCE_URL)-${SRC_BUILD_ENVIRONMENT}"
-    local RESULT_DIR=""
-    local FROM_DIR=""
-    local TO_DIR=""
-    local rpm_path_post_signing
-    local rpm_path_pre_signing
-    local rpm_name
-    local md5sum_post_signing
-    local md5sum_pre_signing
-
-    if [ "$BUILD_TYPE" == "" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): BUILD_TYPE required"
-        return 1
-    fi
-
-    RESULT_DIR="$MY_WORKSPACE/$BUILD_TYPE/results"
-    FROM_DIR="${RESULT_DIR}/${BA_SOURCE_BUILD_ENVIRONMENT}-${BUILD_TYPE}"
-    TO_DIR="${RESULT_DIR}/${MY_BUILD_ENVIRONMENT}-${BUILD_TYPE}"
-    echo "$FUNCNAME: FROM_DIR=$FROM_DIR"
-    echo "$FUNCNAME: TO_DIR=$TO_DIR"
-    echo "$FUNCNAME: MY_BUILD_ENVIRONMENT=$MY_BUILD_ENVIRONMENT"
-
-    # Fix patchs the use MY_BUILD_ENVIRONMENT
-    if [ ! -d "$FROM_DIR" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): Expected directory '$FROM_DIR' is missing."
-        return 1
-    fi
-
-    echo "$FUNCNAME: mv '$FROM_DIR' '$TO_DIR'"
-    \mv "$FROM_DIR" "$TO_DIR"
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): mv '$FROM_DIR' '$TO_DIR'"
-        return 1
-    fi
-
-    local MY_WS_BT="$MY_WORKSPACE/$BUILD_TYPE"
-
-    # Replace signed rpms with non-signed copies .... we aren't a formal build
-    for rpm_path_post_signing in $(find $MY_WS_BT/rpmbuild/RPMS -type f -name '*.rpm' | grep -v src.rpm); do
-
-        rpm_name=$(basename $rpm_path_post_signing)
-        rpm_path_pre_signing=$(find $MY_WS_BT/results -name $rpm_name | head -n1)
-        if [ "$rpm_path_pre_signing" != "" ]; then
-            md5sum_post_signing=$(md5sum ${rpm_path_post_signing} | cut -d ' ' -f 1)
-            md5sum_pre_signing=$(md5sum ${rpm_path_pre_signing} | cut -d ' ' -f 1)
-            if [ "${md5sum_post_signing}" != "${md5sum_pre_signing}" ]; then
-                echo "$FUNCNAME: fixing $rpm_name"
-                \rm -f ${rpm_path_post_signing}
-                if [ $? -ne 0 ]; then
-                    >&2 echo "Error: $FUNCNAME (${LINENO}): rm -f ${rpm_path_post_signing}"
-                    return 1
-                fi
-
-                \cp ${rpm_path_pre_signing} ${rpm_path_post_signing}
-                if [ $? -ne 0 ]; then
-                    >&2 echo "Error: $FUNCNAME (${LINENO}): cp ${rpm_path_pre_signing} ${rpm_path_post_signing}"
-                    return 1
-                fi
-            fi
-        fi;
-    done
-
-    return 0
-}
-
-
-#
-# build_avoidance <build-type>
-#
-# Look for a reference build that is applicable to our current git context.
-# and copy it to our local workspace, if we haven't already done so.
-#
-build_avoidance () {
-    local BUILD_TYPE="$1"
-
-    echo "==== Build Avoidance Start ===="
-
-    export BUILD_AVOIDANCE_LAST_SYNC_FILE="$(build_avoidance_last_sync_file $BUILD_TYPE)"
-    mkdir -p "$(dirname $BUILD_AVOIDANCE_LAST_SYNC_FILE)"
-
-    if [ "$BUILD_TYPE" == "" ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): BUILD_TYPE required"
-        return 1
-    fi
-
-    if [ ! -d $MY_WORKSPACE/$BUILD_TYPE ]; then
-        mkdir -p $MY_WORKSPACE/$BUILD_TYPE
-        if [ $? -ne 0 ]; then
-            >&2 echo "Error: $FUNCNAME (${LINENO}): Failed to create directory $MY_WORKSPACE/$BUILD_TYPE"
-            return 1
-        fi
-    fi
-
-    if [ ! -L $MY_WORKSPACE/$BUILD_TYPE/repo ]; then
-        ln -s $MY_REPO $MY_WORKSPACE/$BUILD_TYPE/repo
-        if [ $? -ne 0 ]; then
-            >&2 echo "Error: $FUNCNAME (${LINENO}): Failed to create symlink $MY_WORKSPACE/$BUILD_TYPE/repo -> $MY_REPO"
-            return 1
-        fi
-    fi
-
-    build_avoidance_pre_clean $BUILD_TYPE
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): build_avoidance_pre_clean $BUILD_TYPE"
-        return 1
-    fi
-
-    build_avoidance_copy $BUILD_TYPE 'verbose'
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): build_avoidance_copy $BUILD_TYPE"
-        return 1
-    fi
-
-    build_avoidance_fixups $BUILD_TYPE
-    if [ $? -ne 0 ]; then
-        >&2 echo "Error: $FUNCNAME (${LINENO}): build_avoidance_fixups $BUILD_TYPE"
-        return 1
-    fi
-
-    echo "==== Build Avoidance Complete ===="
-    return 0
-}
-
-#
-# build_avoidance_save_reference_context
-#
-# For use by a reference build.  Copy the 'CONTEXT' file
-# from the build into a central directory where we save
-# the context of old builds.
-#
-# Individual reference builds use:
-#     MY_WORKSPACE=<common-dir>/<timestamp>
-# and context files are collected in dir:
-#     DEST_CTX_DIR=<common-dir>/context
-# using name:
-#     DEST_CTX=<timestamp>.context
-
-build_avoidance_save_reference_context () {
-    local DIR
-    DIR=$(dirname "${MY_WORKSPACE}")
-
-    # Note: SUB_DIR should be a timestamp
-    local SUB_DIR
-    SUB_DIR=$(basename "${MY_WORKSPACE}")
-
-    local SRC_CTX="${MY_WORKSPACE}/CONTEXT"
-    local DEST_CTX_DIR="${DIR}/context"
-    local DEST_CTX="${DEST_CTX_DIR}/${SUB_DIR}.context"
-
-    if [ ! -f "${SRC_CTX}" ]; then
-        echo "Context file not found at '${SRC_CTX}'"
-        return 1
-    fi
-
-    mkdir -p "${DEST_CTX_DIR}"
-    if [ $? -ne 0 ]; then
-        echo "Error: $FUNCNAME (${LINENO}): Failed to create directory '${DEST_CTX_DIR}'"
-        return 1
-    fi
-
-    cp "${SRC_CTX}" "${DEST_CTX}"
-    if [ $? -ne 0 ]; then
-        echo "Error: $FUNCNAME (${LINENO}): Failed to copy ${SRC_CTX} -> ${DEST_CTX}"
-        return 1
-    fi
-
-    return 0
-}
diff --git a/build-tools/build-docker-images/README b/build-tools/build-docker-images/README
index e961232e..c5dbf6ed 100644
--- a/build-tools/build-docker-images/README
+++ b/build-tools/build-docker-images/README
@@ -3,21 +3,22 @@
 PRIVATE_REGISTRY_USERID=myuser
 PRIVATE_REGISTRY=xxx.xxx.xxx.xxx:9001
 VERSION=2018.11.13
-OS=centos
+OS=debian
 OS_VERSION=7.5.1804
 BUILD_STREAM=stable
 HOST_PORT=8088
+PUBLISH_URL=https://mirror.starlingx.windriver.com/mirror/starlingx/master/${OS}/monolithic/latest_build/
 
-## Step 1: Build stx-centos
+## Step 1: Build stx-debian
 time $MY_REPO/build-tools/build-docker-images/build-stx-base.sh \
     --os ${OS} \
     --os-version ${OS_VERSION} \
     --version ${VERSION} \
     --user ${PRIVATE_REGISTRY_USERID} \
     --registry ${PRIVATE_REGISTRY} \
+    --repo 'deb [trusted=yes check-valid-until=0] $PUBLISH_URL/inputs/packages ./'
+    --repo 'deb [trusted=yes check-valid-until=0] $PUBLISH_URL/outputs/std/packages ./'
     --push \
-    --repo stx-local-build,http://${HOSTNAME}:${HOST_PORT}/${MY_WORKSPACE}/std/rpmbuild/RPMS \
-    --repo stx-mirror-distro,http://${HOSTNAME}:${HOST_PORT}/${MY_REPO}/cgcs-root/cgcs-${OS}-repo/Binary \
     --clean
 
 
diff --git a/build-tools/build-docker-images/base-image-build-centos-dev.cfg b/build-tools/build-docker-images/base-image-build-centos-dev.cfg
deleted file mode 100644
index e4ea3cd6..00000000
--- a/build-tools/build-docker-images/base-image-build-centos-dev.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-# one option per line, option=value
-repo=ussuri-wsgi,https://mirror.starlingx.windriver.com/mirror/centos/centos/mirror.centos.org/centos/7/sclo/x86_64/rh/
diff --git a/build-tools/build-docker-images/base-image-build-centos-stable.cfg b/build-tools/build-docker-images/base-image-build-centos-stable.cfg
deleted file mode 100644
index e4ea3cd6..00000000
--- a/build-tools/build-docker-images/base-image-build-centos-stable.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-# one option per line, option=value
-repo=ussuri-wsgi,https://mirror.starlingx.windriver.com/mirror/centos/centos/mirror.centos.org/centos/7/sclo/x86_64/rh/
diff --git a/build-tools/build-docker-images/build-stx-base.sh b/build-tools/build-docker-images/build-stx-base.sh
index acb60a00..7acd62f5 100755
--- a/build-tools/build-docker-images/build-stx-base.sh
+++ b/build-tools/build-docker-images/build-stx-base.sh
@@ -18,7 +18,7 @@ if [ -z "${MY_WORKSPACE}" -o -z "${MY_REPO}" ]; then
     exit 1
 fi
 
-SUPPORTED_OS_ARGS=('centos' 'debian')
+SUPPORTED_OS_ARGS=( 'debian' )
 OS=                      # default: autodetect
 OS_VERSION=              # default: lookup "ARG RELEASE" in Dockerfile
 BUILD_STREAM=stable
@@ -52,7 +52,6 @@ Options:
     --version:    Specify version for output image
     --stream:     Build stream, stable or dev (default: stable)
     --repo:       Software repository, can be specified multiple times
-                    * CentOS format: "NAME,BASEURL"
                     * Debian format: "TYPE [OPTION=VALUE...] URL DISTRO COMPONENTS..."
                       This will be added to /etc/apt/sources.list as is,
                       see also sources.list(5) manpage.
@@ -281,13 +280,7 @@ fi
 
 if [ ${#REPO_LIST[@]} -eq 0 ]; then
     # Either --repo or --local must be specified
-    if [ "${LOCAL}" = "yes" ]; then
-        if [[ "$OS" == "centos" ]] ; then
-            REPO_LIST+=("local-std,http://${HOST}:8088${MY_WORKSPACE}/std/rpmbuild/RPMS")
-            REPO_LIST+=("stx-distro,http://${HOST}:8089${MY_REPO}/cgcs-${OS}-repo/Binary")
-        fi
-        # debian is handled down below
-    elif [ "${BUILD_STREAM}" != "dev" -a "${BUILD_STREAM}" != "master" ]; then
+    if [ "${LOCAL}" != "yes" -a "${BUILD_STREAM}" != "dev" -a "${BUILD_STREAM}" != "master" ]; then
         echo "Either --local or --repo must be specified" >&2
         exit 1
     fi
@@ -314,33 +307,7 @@ fi
 cp ${SRC_DOCKERFILE} ${BUILDDIR}/Dockerfile
 
 # Generate the stx.repo file
-if [[ "$OS" == "centos" ]] ; then
-    STX_REPO_FILE=${BUILDDIR}/stx.repo
-    for repo in ${REPO_LIST[@]}; do
-        repo_name=$(echo $repo | awk -F, '{print $1}')
-        repo_baseurl=$(echo $repo | awk -F, '{print $2}')
-
-        if [ -z "${repo_name}" -o -z "${repo_baseurl}" ]; then
-            echo "Invalid repo specified: ${repo}" >&2
-            echo "Expected format: name,baseurl" >&2
-            exit 1
-        fi
-
-        cat >>${STX_REPO_FILE} <<EOF
-[${repo_name}]
-name=${repo_name}
-baseurl=${repo_baseurl}
-enabled=1
-gpgcheck=0
-skip_if_unavailable=1
-metadata_expire=0
-
-EOF
-
-        REPO_OPTS="${REPO_OPTS} --enablerepo=${repo_name}"
-    done
-else
-
+if [[ "$OS" == "debian" ]] ; then
     # These env vars must be defined in debian builder pods
     for var in DEBIAN_SNAPSHOT DEBIAN_SECURITY_SNAPSHOT DEBIAN_DISTRIBUTION REPOMGR_DEPLOY_URL REPOMGR_ORIGIN ; do
         if [[ -z "${!var}" ]] ; then
@@ -413,9 +380,7 @@ IMAGE_NAME_LATEST=${DOCKER_REGISTRY}${DOCKER_USER}/stx-${OS}:${LATEST_TAG}
 
 declare -a BUILD_ARGS
 BUILD_ARGS+=(--build-arg RELEASE=${OS_VERSION})
-if [[ "$OS" == "centos" ]] ; then
-    BUILD_ARGS+=(--build-arg "REPO_OPTS=${REPO_OPTS}")
-else
+if [[ "$OS" == "debian" ]] ; then
     BUILD_ARGS+=(--build-arg "DIST=${DEBIAN_DISTRIBUTION}")
 fi
 
diff --git a/build-tools/build-docker-images/loci/patches/0001-starlingx-enable-disable-package-repos.patch b/build-tools/build-docker-images/loci/patches/0001-starlingx-enable-disable-package-repos.patch
index 7715fc89..ea4f04a5 100644
--- a/build-tools/build-docker-images/loci/patches/0001-starlingx-enable-disable-package-repos.patch
+++ b/build-tools/build-docker-images/loci/patches/0001-starlingx-enable-disable-package-repos.patch
@@ -52,7 +52,7 @@ new file mode 100755
 index 0000000..dd43612
 --- /dev/null
 +++ b/stx-scripts/setup-package-repos.sh
-@@ -0,0 +1,126 @@
+@@ -0,0 +1,88 @@
 +#!/bin/bash
 +
 +set -ex
@@ -60,11 +60,7 @@ index 0000000..dd43612
 +#
 +# This script enables or disables package repos specified
 +# by the DIST_REPOS environment variable, which must contain
-+# a space-separated list of repos (in CentOS) or list files
-+# (Debian) to enable or disable.
-+#
-+# In CentOS repo names refer to the names in square brackets
-+# in any repo files under /etc/yum.repos.d.
++# a list files (Debian) to enable or disable.
 +#
 +# In Debian repo names refer to individual files under
 +# /etc/apt/sources.list.d/$NAME.list.
@@ -80,8 +76,7 @@ index 0000000..dd43612
 +#           repo, and any repo's passed on the command-line
 +#           to "build-stx-image.sh" script.
 +#
-+#   OS    - same as "base updates extras" in CentOS
-+#           same as "debian" in Debian
++#   OS    - same as "debian" in Debian
 +#
 +#
 +# These keywords have the same meaning in all distros, while actual
@@ -93,15 +88,6 @@ index 0000000..dd43612
 +# If a repo doesn't match an existing repository, this script will
 +# fail.
 +#
-+# CentOS Example
-+# ==============
-+#   DIST_REPOS="-base -updates"
-+#      disable "base" and "updates" repos normally defined
-+#      in /etc/yum.repos.d/CentOS-Base.repo
-+#
-+#   DIST_REPOS="-STX +OS -updates"
-+#      disable all local repos, enable core OS repos, except "updates"
-+#
 +# Debian Example
 +# ==============
 +#   DIST_REPOS="debian"
@@ -119,11 +105,6 @@ index 0000000..dd43612
 +        [OS]="debian"
 +        [STX]="stx"
 +    )
-+    # yum repo IDs
-+    declare -A CENTOS_REPO_GROUPS=(
-+        [OS]="base updates extras"
-+        [STX]="/etc/yum.repos.d/stx.repo"   # ie, all repos defined in this file
-+    )
 +
 +    distro=$(awk -F= '/^ID=/ {gsub(/\"/, "", $2); print $2}' /etc/*release)
 +    # enable or disable each repo
@@ -153,25 +134,6 @@ index 0000000..dd43612
 +                    fi
 +                done
 +                ;;
-+            centos)
-+                specs="${CENTOS_REPO_GROUPS[$base]:-$base}"
-+                for spec in $specs ; do
-+                    # repo id begins with a "/" - assume its a full path to a .repo file
-+                    # and enable/disable all repos defined in that file
-+                    if [[ "${spec#/}" != "$spec" ]] ; then
-+                        repos=$(sed -r -n 's/^\s*[[]([^]]+)[]]\s*$/\1/gp' "$spec")
-+                    else
-+                        repos=$spec
-+                    fi
-+                    for repo in $repos ; do
-+                        if [[ $enable -eq 1 ]] ; then
-+                            yum-config-manager --enable "$repo"
-+                        else
-+                            yum-config-manager --disable "$repo"
-+                        fi
-+                    done
-+                done
-+                ;;
 +            *)
 +                echo "error: unsupported OS \"$distro\"" >&2
 +                exit 1
diff --git a/build-tools/build-docker-images/stx-centos/Dockerfile.dev b/build-tools/build-docker-images/stx-centos/Dockerfile.dev
deleted file mode 100644
index af30a6b6..00000000
--- a/build-tools/build-docker-images/stx-centos/Dockerfile.dev
+++ /dev/null
@@ -1,16 +0,0 @@
-# Expected build arguments:
-#   RELEASE: centos release
-#
-ARG RELEASE=7.5.1804
-FROM centos:${RELEASE}
-
-RUN set -ex ;\
-    sed -i '/\[main\]/ atimeout=120' /etc/yum.conf ;\
-    yum install -y centos-release-openstack-stein ;\
-    rm -rf \
-        /var/log/* \
-        /tmp/* \
-        /var/tmp/*
-
-# root CA cert expired on October 1st, 2021
-RUN yum update -y ca-certificates
diff --git a/build-tools/build-docker-images/stx-centos/Dockerfile.stable b/build-tools/build-docker-images/stx-centos/Dockerfile.stable
deleted file mode 100644
index b30f615a..00000000
--- a/build-tools/build-docker-images/stx-centos/Dockerfile.stable
+++ /dev/null
@@ -1,31 +0,0 @@
-# Expected build arguments:
-#   RELEASE: centos release
-#   REPO_OPTS: yum options to enable StarlingX repo
-#
-ARG RELEASE=7.5.1804
-FROM centos:${RELEASE}
-
-ARG REPO_OPTS
-
-# The stx.repo file must be generated by the build tool first
-COPY stx.repo /
-
-RUN set -ex ;\
-    sed -i '/\[main\]/ atimeout=120' /etc/yum.conf ;\
-    mv /stx.repo /etc/yum.repos.d/ ;\
-    yum upgrade --disablerepo=* ${REPO_OPTS} -y ;\
-    yum install --disablerepo=* ${REPO_OPTS} -y \
-        qemu-img \
-        openssh-clients \
-        python3 \
-        python3-pip \
-        python3-wheel \
-        rh-python36-mod_wsgi \
-        ;\
-    rm -rf \
-        /var/log/* \
-        /tmp/* \
-        /var/tmp/*
-
-# root CA cert expired on October 1st, 2021
-RUN yum update -y ca-certificates
diff --git a/build-tools/build-docker-images/update-stx-image.sh b/build-tools/build-docker-images/update-stx-image.sh
index da6cd3d2..80fa8d8a 100755
--- a/build-tools/build-docker-images/update-stx-image.sh
+++ b/build-tools/build-docker-images/update-stx-image.sh
@@ -58,7 +58,7 @@ Options:
     --module-src: Specify path to module source to install/update (dir or git repo)
                   Formats: dir[|version]
                            url[|branch][|version]
-    --pkg:        Specify path to distro package to install/update (ie. rpm)
+    --pkg:        Specify path to distro package to install/update (i.e. deb)
     --customize:  Customization script
     --extra:      Extra file (to be accessible to customization script)
     --push:       Push to docker repo
diff --git a/build-tools/build-guest b/build-tools/build-guest
deleted file mode 100755
index ab1d9d69..00000000
--- a/build-tools/build-guest
+++ /dev/null
@@ -1,412 +0,0 @@
-#!/bin/env bash
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Build the tis-centos-image.img or tis-centos-image-rt.img file
-#
-
-BUILD_GUEST_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-source "${BUILD_GUEST_DIR}/image-utils.sh"
-
-PROGNAME=$(basename "$0")
-
-# NOTE: TMP_DIR must end in '/'
-# NOTE: /tmp/ is now tmpfs like.  Can't be trusted across multiple mock commands
-# TMP_DIR=/tmp/
-TMP_DIR=/
-
-# Use RPMs from the std build only, for now
-export BUILD_TYPE=std
-export MY_BUILD_DIR_TOP=$MY_BUILD_DIR
-
-function init_vars {
-   # Output path (current dir unless MY_WORKSPACE defined)
-   OUTPUT_DIR="$PWD/export"
-   if [ ! -z "$MY_WORKSPACE" ] && [ -d "$MY_WORKSPACE" ] ; then
-       OUTPUT_DIR="$MY_WORKSPACE/export"
-       CGCS_REPO_DIR="$MY_WORKSPACE/rpmbuild/RPMS"
-   fi
-
-   if [ -n "$MY_GUEST_DIR" ]; then
-       GUEST_DIR=$MY_GUEST_DIR
-   else
-       GUEST_DIR=$MY_WORKSPACE/guest
-   fi
-
-   MOCK=/usr/bin/mock
-   if [ $VERBOSE -eq 0 ]; then
-       MOCK="$MOCK -q"
-   fi
-
-   # Path to guest configuration
-   GUEST_BUILD_DIR="${BUILD_GUEST_DIR}/build_guest"
-   GUEST_BUILD_CMD=$GUEST_BUILD_DIR/build-guest-image.py
-   if [ $VERBOSE -eq 1 ]; then
-       GUEST_BUILD_CMD="$GUEST_BUILD_CMD -x"
-   fi
-
-   if [ $BUILD_MODE == 'std' ]; then
-       OUTPUT_FILE=$OUTPUT_DIR/tis-centos-guest.img
-   elif [ $BUILD_MODE == 'rt' ]; then
-       OUTPUT_FILE=$OUTPUT_DIR/tis-centos-guest-rt.img
-   else
-       printf "   Error -- unknown BUILD_MODE '$BUILD_MODE'\n";
-       exit 1
-   fi
-}
-
-
-function check_vars {
-   # Where to store data
-   printf "Finding cgcs-root\n"
-   printf "  Checking \$MY_REPO (value \"$MY_REPO\")\n"
-
-   if [ ! -z "$MY_REPO" ] && [ -d "$MY_REPO" ] ; then
-      INTERNAL_REPO_ROOT=$MY_REPO
-      printf "  Found!\n"
-   fi
-
-   if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  No joy -- checking \$MY_REPO_ROOT_DIR (value \"$MY_REPO_ROOT_DIR\")\n"
-      if [ ! -z "$MY_REPO_ROOT_DIR" ] && [ -d "$MY_REPO_ROOT_DIR/cgcs-root" ] ; then
-          INTERNAL_REPO_ROOT=$MY_REPO_ROOT_DIR/cgcs-root
-          printf "  Found!\n"
-      fi
-   fi
-
-   if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  No joy -- checking for \$MY_WORKSPACE/cgcs-root\n"
-      if [ -d "$MY_WORKSPACE/cgcs-root" ] ; then
-          INTERNAL_REPO_ROOT=$MY_WORKSPACE/cgcs-root
-          printf "  Found!\n"
-      fi
-   fi
-
-   if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  Error -- could not locate cgcs-root repo.\n"
-      exit 1
-   fi
-
-   STX_DIR=$INTERNAL_REPO_ROOT/stx
-
-   if [ "x$MY_BUILD_CFG" == "x" ];then
-       printf "  Error -- reqiure MY_BUILD_CFG to be defined.\n"
-       exit 1
-   fi
-
-   RELEASE_INFO="$(get_release_info)"
-   if [ $? -ne 0 ]; then
-       echo "WARNING: failed to find a release info file."
-   else
-       export PLATFORM_RELEASE=$(source "$RELEASE_INFO" && echo $PLATFORM_RELEASE)
-   fi
-
-}
-
-
-function create_rootfs {
-    printf "\nCreating guest file system\n"
-
-    mkdir -p $GUEST_DIR
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Could not create $GUEST_DIR\n";
-	exit 1
-    fi
-
-    # Place build-time environment variables in mock configuration
-    GUEST_ENV="${MY_BUILD_ENVIRONMENT}-guest"
-    GUEST_CFG=$GUEST_DIR/$MY_BUILD_ENVIRONMENT_FILE
-
-    MY_BUILD_ENVIRONMENT=$GUEST_ENV "${BUILD_GUEST_DIR}/modify-build-cfg" $GUEST_CFG
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Could not update $GUEST_CFG\n";
-	exit 1
-    fi
-
-    # Setup mock directories for the guest
-    if [ -d /localdisk/loadbuild/mock ]; then
-	LNK=/localdisk/loadbuild/mock/$GUEST_ENV
-	if [ ! -L $LNK ]; then
-	    ln -s $GUEST_DIR $LNK
-	fi
-    fi
-
-    if [ -d /localdisk/loadbuild/mock-cache ]; then
-	mkdir -p $GUEST_DIR/cache
-	LNK=/localdisk/loadbuild/mock-cache/$GUEST_ENV
-	if [ ! -L $LNK ]; then
-	    ln -s $GUEST_DIR/cache $LNK
-	fi
-    fi
-
-    # Setup mock chroot environment
-    $MOCK -r $GUEST_CFG --clean && $MOCK -r $GUEST_CFG --init
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Failed to setup guest mock chroot\n";
-	exit 1
-    fi
-
-    # Install the RPMs to the root filesystem
-    
-    # Note that the "rt" build needs access to both local-std and local-rt repos
-    local EXTRA_REPOS=""
-
-    if [ $BUILD_MODE == 'std' ]; then
-       INC_RPM_LIST=$(grep -v '^#' ${GUEST_BUILD_DIR}/rpm-install-list.txt)
-       TIS_RPM_LIST=$(image_inc_list guest std centos)
-    elif [ $BUILD_MODE == 'rt' ]; then
-       INC_RPM_LIST=$(grep -v '^#' ${GUEST_BUILD_DIR}/rpm-install-list-rt.txt)
-       TIS_RPM_LIST=$(image_inc_list guest rt centos)
-       EXTRA_REPOS="--enablerepo local-rt"
-    else
-       printf "   Error -- unknown BUILD_MODE '$BUILD_MODE'\n";
-       exit 1
-    fi
-
-    $MOCK -r $GUEST_CFG ${EXTRA_REPOS} --install ${INC_RPM_LIST} ${TIS_RPM_LIST} "$@"
-    if [ $? -ne 0 ]; then
-        printf "=====\n"
-        cat $GUEST_DIR/mock/result/root.log | sed -n '/Error:/,$p' | sed '/Child return code was:/q'
-        printf "=====\n"
-	printf "   Error -- Failed to install RPM packages\n";
-	exit 1
-    fi
-
-    # Make sure all requested packages are installed
-    MISSING=$(
-        extra_rpm_names="$(
-            for p in "$@" ; do
-                # skip URLs
-                if [[ "$p" =~ :// ]] ; then
-                    continue
-                fi
-                # if it contains a slash or ends with .rpm, assume its a local file
-                # and read its embedded package name
-                if [[ "$p" =~ / || "$p" =~ [.]rpm$ ]] ; then
-                    rpm -q --qf '%{name}\n' -p "$p"
-                # otherwise assume its a package name already
-                else
-                    echo "$p"
-                fi
-            done
-        )"
-        $MOCK -r $GUEST_CFG --chroot -- rpm -q --whatprovides ${INC_RPM_LIST} ${TIS_RPM_LIST} $extra_rpm_names \
-            | sed -n 's/^no package provides //p' \
-            | sort -u
-    )
-    if [ -n "$MISSING" ]; then
-        printf "=====\n"
-        printf "WARNING: The following RPMs are missing or could not be installed:\n"
-        local p
-        for p in $MISSING ; do
-            echo "   [$p]"
-        done
-        printf "=====\n"
-    fi
-
-    # Remove RPMs that are not required in image (pruned package list)
-    # NOTE: these are automatically installed from the mock init not
-    # through dependencies.
-    EXC_RPM_LIST=$(grep -v '^#' ${GUEST_BUILD_DIR}/rpm-remove-list.txt)
-
-    $MOCK -r $GUEST_CFG --remove ${EXC_RPM_LIST}
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Failed to remove RPM packages\n";
-	exit 1
-    fi
-
-    printf "  Done\n"
-}
-
-
-function update_rootfs {
-    printf "\nCustomizing guest file system\n"
-
-    # Copy over skeleton configuration files
-    for GUEST_ROOTFS in $GUEST_BUILD_DIR/rootfs $GUEST_BUILD_DIR/rootfs-$BUILD_MODE;
-    do
-        for f in $(cd $GUEST_ROOTFS && find . -type f | cut -c3-);
-        do
-            echo "$MOCK -r $GUEST_CFG --copyin $GUEST_ROOTFS/$f $f"
-	    $MOCK -r $GUEST_CFG --copyin $GUEST_ROOTFS/$f $f
-	    if [ $? -ne 0 ]; then
-	        printf "   Error -- Failed to copyin file $f\n";
-	        exit 1
-	    fi
-        done
-    done
-
-    # Run the root file system setup script inside the chroot
-    ROOTFS_SETUP=rootfs-setup.sh
-    $MOCK -r $GUEST_CFG --copyin $GUEST_BUILD_DIR/$ROOTFS_SETUP $TMP_DIR && \
-    if [ $BUILD_MODE == 'rt' ]; then
-       ROOTFS_SETUP_CMD="$TMP_DIR$ROOTFS_SETUP --rt"
-    elif [ $BUILD_MODE == 'std' ]; then
-       ROOTFS_SETUP_CMD="$TMP_DIR$ROOTFS_SETUP --std"
-    else
-       ROOTFS_SETUP_CMD="$TMP_DIR$ROOTFS_SETUP"
-    fi
-    $MOCK -r $GUEST_CFG --chroot "$ROOTFS_SETUP_CMD"
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Failed to run guest $ROOTFS_SETUP\n";
-	exit 1
-    fi
-    $MOCK -r $GUEST_CFG --chroot "rm -f $TMP_DIR$ROOTFS_SETUP"
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Failed to delete $ROOTFS_SETUP from guest\n";
-	exit 1
-    fi
-
-    printf "  Done\n"
-}
-
-
-function build_image {
-    # Build the image
-    printf "\nBuilding guest image $OUTPUT_FILE\n"
-
-    mkdir -p $OUTPUT_DIR
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Could not create $OUTPUT_DIR\n";
-	exit 1
-    fi
-
-    # Build guest rootfs archive
-    ROOTFS_SPACE=$((500*1024*1024))
-    ROOTFS_TAR=rootfs.tar
-    ROOTFS_EXCLUDE=rootfs-exclude.txt
-
-    $MOCK -r $GUEST_CFG --copyin $GUEST_BUILD_DIR/$ROOTFS_EXCLUDE $TMP_DIR
-    $MOCK -r $GUEST_CFG --chroot -- tar -cf $TMP_DIR$ROOTFS_TAR -X $TMP_DIR$ROOTFS_EXCLUDE --exclude=$TMP_DIR$ROOTFS_TAR --numeric-owner /
-    $MOCK -r $GUEST_CFG --copyout $TMP_DIR$ROOTFS_TAR $GUEST_DIR
-    $MOCK -r $GUEST_CFG --chroot -- rm -f $TMP_DIR$ROOTFS_TAR
-
-    $GUEST_BUILD_CMD -i $GUEST_DIR/$ROOTFS_TAR -o $OUTPUT_FILE -s $ROOTFS_SPACE
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Failed to build guest image\n";
-	exit 1
-    fi
-
-    printf "  Done\n"
-}
-
-
-function clean_guest {
-    printf "\nCleaning the guest $GUEST_DIR\n"
-
-    if [ ! -e $GUEST_DIR ]; then	
-	printf "  Done...nothing to do\n";
-	exit 0
-    fi
-
-    # Place build-time environment variables in mock configuration
-    GUEST_ENV="${MY_BUILD_ENVIRONMENT}-guest"
-    GUEST_CFG=$GUEST_DIR/$MY_BUILD_ENVIRONMENT_FILE
-
-    if [ ! -e $GUEST_CFG ]; then
-	MY_BUILD_ENVIRONMENT=$GUEST_ENV "${BUILD_GUEST_DIR}/modify-build-cfg" $GUEST_CFG
-	if [ $? -ne 0 ]; then
-	    printf "   Error -- Could not update $GUEST_CFG\n";
-	    exit 1
-	fi
-    fi
-
-    $MOCK -r $GUEST_CFG --clean
-    $MOCK -r $GUEST_CFG --scrub=cache
-
-    rm -rf $GUEST_DIR
-    if [ $? -ne 0 ]; then
-	printf "   Error -- Failed to remove guest $GUEST_DIR\n";
-	exit 1
-    fi
-
-    printf "  Done\n"
-}
-
-#############################################
-# Main code
-#############################################
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   build-guest [--rt | --std] [--verbose] [EXTRA_RPMS...]"
-    echo "   build-guest [--help]"
-    echo "   build-guest [--clean]"
-    echo ""
-    echo "EXTRA_RPMS are either package names or full RPM file paths"
-}
-
-# Default argument values
-HELP=0
-CLEAN=0
-VERBOSE=0
-BUILD_MODE='std'
-
-# read the options
-TEMP=`getopt -o h --long clean,rt,std,verbose,help -n "$PROGNAME" -- "$@"` || exit 1
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        -h|--help) HELP=1 ; shift ;;
-        --clean) CLEAN=1 ; shift ;;
-        --verbose) VERBOSE=1 ; shift ;;
-        --rt) BUILD_MODE='rt' ; shift ;;
-        --std) BUILD_MODE='std' ; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error!" ; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-   usage
-   exit 0
-fi
-
-if [[ $CLEAN -eq 1 && "$#" -gt 0 ]] ; then
-    echo "Too many arguments!" >&2 ; exit 1
-else
-    # make sure extra RPM files exist
-    for p in "$@" ; do
-        # skip URLs
-        if [[ "$p" =~ :// ]] ; then
-            continue
-        fi
-        # if it contains a slash or ends with .rpm assume its a local file name
-        if [[ "$p" =~ / || "$p" =~ [.]rpm$ ]] ; then
-            # make sure it exists and is an RPM file
-            true <"$p" || exit 1
-            if ! file --brief --mime-type "$p" | grep -q "^application/x-rpm$" ; then
-                echo "$p: not an RPM file" >&2
-                exit 1
-            fi
-        fi
-    done
-    unset p
-fi
-
-(
-printf "\n*****************************\n"
-printf   "Create Titanium Cloud/CentOS Guest Image\n"
-printf   "*****************************\n\n"
-
-init_vars
-check_vars
-
-if [ $CLEAN -eq 1 ]; then
-   clean_guest
-   exit 0
-fi
-
-create_rootfs "$@"
-update_rootfs
-build_image
-
-) 2>&1 | stdbuf -o0 awk '{ print strftime("%H:%M:%S"), $0; fflush(); }' ; exit ${PIPESTATUS[0]}
diff --git a/build-tools/build-helm-charts.sh b/build-tools/build-helm-charts.sh
index e17cc95d..7e38ba76 100755
--- a/build-tools/build-helm-charts.sh
+++ b/build-tools/build-helm-charts.sh
@@ -12,7 +12,7 @@
 BUILD_HELM_CHARTS_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
 source $BUILD_HELM_CHARTS_DIR/utils.sh || exit 1
 
-SUPPORTED_OS_ARGS=('centos' 'debian')
+SUPPORTED_OS_ARGS=('debian')
 OS=
 LABEL=""
 APP_NAME="stx-openstack"
@@ -35,7 +35,7 @@ Usage:
 $(basename $0) [--os <os>] [-a, --app <app-name>]
                [-A, --app-version-file /path/to/$APP_VERSION_BASE]
                [-B, --app-version <version>]
-               [-r, --rpm <rpm-name>] [-i, --image-record <image-record>] [--label <label>]
+               [--package <package-name>] [-i, --image-record <image-record>] [--label <label>]
                [-p, --patch-dependency <patch-dependency>] [ --verbose ]
 Options:
     --os:
@@ -55,13 +55,10 @@ Options:
             Specify application (tarball) version, this overrides any other
             version information.
 
-    -r, --package PACKAGE_NAME,... :
+    --package PACKAGE_NAME,... :
             Top-level package(s) containing the helm chart(s), comma-separated.
             Default: ${APP_NAME}-helm
 
-    --rpm PACKAGE_NAME,... :
-            (Deprecated) same as --package
-
     -i, --image-record FILENAME :
             Specify the path to image record file(s) or url(s).
             Multiple files/urls can be specified with a comma-separated
@@ -136,18 +133,18 @@ function build_image_versions_to_armada_manifest {
         # <docker-registry>/<repository>/<repository>/.../<image-name>:<tag>
         #
         # An example of the content of an image record file:
-        # e.g. images-centos-dev-latest.lst
-        # docker.io/starlingx/stx-aodh:master-centos-dev-latest
-        # docker.io/starlingx/stx-ceilometer:master-centos-dev-latest
-        # docker.io/starlingx/stx-cinder:master-centos-dev-latest
+        # e.g. images-debian-stable-latest.lst
+        # docker.io/starlingx/stx-aodh:master-debian-stable-latest
+        # docker.io/starlingx/stx-ceilometer:master-debian-stable-latest
+        # docker.io/starlingx/stx-cinder:master-debian-stable-latest
         # ...
         #
         # An example of the usage of an image reference in manifest file:
         # e.g. manifest.yaml
         # images:
         #   tags:
-        #     aodh_api: docker.io/starlingx/stx-aodh:master-centos-stable-latest
-        #     aodh_db_sync: docker.io/starlingx/stx-aodh:master-centos-stable-latest
+        #     aodh_api: docker.io/starlingx/stx-aodh:master-debian-stable-latest
+        #     aodh_db_sync: docker.io/starlingx/stx-aodh:master-debian-stable-latest
         #     ...
         #
         # To replace the images in the manifest file with the images in image record file:
@@ -156,14 +153,14 @@ function build_image_versions_to_armada_manifest {
         #    e.g. image_name = stx-aodh
         #
         # 2. search the image reference in manifest yaml via image_name
-        #    e.g. old_image_reference = docker.io/starlingx/stx-aodh:master-centos-stable-latest
+        #    e.g. old_image_reference = docker.io/starlingx/stx-aodh:master-debian-stable-latest
         #
         # 3. update the manifest file to replace the old image references with the new one
         #    e.g. manifest.yaml
         #    images:
         #      tags:
-        #        aodh_api: docker.io/starlingx/stx-aodh:master-centos-dev-latest
-        #        aodh_db_sync: docker.io/starlingx/stx-aodh:master-centos-dev-latest
+        #        aodh_api: docker.io/starlingx/stx-aodh:master-debian-stable-latest
+        #        aodh_db_sync: docker.io/starlingx/stx-aodh:master-debian-stable-latest
         #
         image_record=${IMAGE_RECORD_PATH}/$(basename ${image_record})
         ${PYTHON_2_OR_3} $BUILD_HELM_CHARTS_DIR/helm_chart_modify.py ${manifest_file} ${manifest_file}.tmp ${image_record}
@@ -188,18 +185,18 @@ function build_image_versions_to_fluxcd_manifests {
         # <docker-registry>/<repository>/<repository>/.../<image-name>:<tag>
         #
         # An example of the content of an image record file:
-        # e.g. images-centos-dev-latest.lst
-        # docker.io/starlingx/stx-aodh:master-centos-dev-latest
-        # docker.io/starlingx/stx-ceilometer:master-centos-dev-latest
-        # docker.io/starlingx/stx-cinder:master-centos-dev-latest
+        # e.g. images-debian-stable-latest.lst
+        # docker.io/starlingx/stx-aodh:master-debian-stable-latest
+        # docker.io/starlingx/stx-ceilometer:master-debian-stable-latest
+        # docker.io/starlingx/stx-cinder:master-debian-stable-latest
         # ...
         #
         # An example of the usage of an image reference in manifest file:
         # e.g. manifest.yaml
         # images:
         #   tags:
-        #     aodh_api: docker.io/starlingx/stx-aodh:master-centos-stable-latest
-        #     aodh_db_sync: docker.io/starlingx/stx-aodh:master-centos-stable-latest
+        #     aodh_api: docker.io/starlingx/stx-aodh:master-debian-stable-latest
+        #     aodh_db_sync: docker.io/starlingx/stx-aodh:master-debian-stable-latest
         #     ...
         #
         # To replace the images in the manifest file with the images in image record file:
@@ -208,14 +205,14 @@ function build_image_versions_to_fluxcd_manifests {
         #    e.g. image_name = stx-aodh
         #
         # 2. search the image reference in manifest yaml via image_name
-        #    e.g. old_image_reference = docker.io/starlingx/stx-aodh:master-centos-stable-latest
+        #    e.g. old_image_reference = docker.io/starlingx/stx-aodh:master-debian-stable-latest
         #
         # 3. update the manifest file to replace the old image references with the new one
         #    e.g. manifest.yaml
         #    images:
         #      tags:
-        #        aodh_api: docker.io/starlingx/stx-aodh:master-centos-dev-latest
-        #        aodh_db_sync: docker.io/starlingx/stx-aodh:master-centos-dev-latest
+        #        aodh_api: docker.io/starlingx/stx-aodh:master-debian-stable-latest
+        #        aodh_db_sync: docker.io/starlingx/stx-aodh:master-debian-stable-latest
         #
         image_record=${IMAGE_RECORD_PATH}/$(basename ${image_record})
         find ${manifest_folder} -name "*.yaml" | while read manifest_file; do
@@ -435,23 +432,7 @@ filter_existing_dirs() {
 function find_package_files {
     local -a dirlist
     local dir
-    if [[ "$OS" == "centos" ]] ; then
-        local centos_repo="${MY_REPO}/centos-repo"
-        if [[ ! -d "${centos_repo}" ]] ; then
-            centos_repo="${MY_REPO}/cgcs-centos-repo"
-            if [[ ! -d "${centos_repo}" ]] ; then
-                echo "ERROR: directory ${MY_REPO}/centos-repo not found." >&2
-                exit 1
-            fi
-        fi
-        readarray -t dirlist < <(filter_existing_dirs \
-            "${MY_WORKSPACE}/std/rpmbuild/RPMS" \
-            "${centos_repo}/Binary/noarch")
-        if [[ "${#dirlist[@]}" -gt 0 ]] ; then
-            echo "looking for packages in ${dirlist[*]}" >&2
-            find "${dirlist[@]}" -xtype f -name "*.tis.noarch.rpm"
-        fi
-    else
+    if [[ "$OS" == "debian" ]] ; then
         # FIXME: can't search 3rd-party binary debs because they are not accessible
         # on the filesystem, but only as remote files in apt repos
         readarray -t dirlist < <(filter_existing_dirs "${MY_WORKSPACE}/std")
@@ -491,9 +472,7 @@ function find_helm_chart_package_files {
     local failed=0
     for package_file in $(find_package_files) ; do
         package_name="$(
-            if [[ "$OS" == "centos" ]] ; then
-                rpm_get_name "$package_file" || exit 1
-            else
+            if [[ "$OS" == "debian" ]] ; then
                 deb_get_control "$package_file" | deb_get_field "Package"
                 check_pipe_status
             fi
@@ -537,10 +516,7 @@ function find_helm_chart_package_files {
         fi
 
         local -a dep_package_names=($(
-            if [[ "$OS" == "centos" ]] ; then
-                rpm -qRp "$package_file" | sed 's/rpmlib([a-zA-Z0-9]*)[[:space:]]\?[><=!]\{0,2\}[[:space:]]\?[0-9.-]*//g' | grep -E -v -e '/' -e '^\s*$'
-                check_pipe_status || exit 1
-            else
+            if [[ "$OS" == "debian" ]] ; then
                 deb_get_control "$package_file" | deb_get_simple_depends
                 check_pipe_status || exit 1
             fi
@@ -591,14 +567,6 @@ function extract_chart_from_package {
     local package_file=$1
     echo "extracting charts from package $package_file" >&2
     case $OS in
-        centos)
-            rpm2cpio "$package_file" | cpio ${CPIO_FLAGS}
-            if ! check_pipe_status ; then
-                echo "Failed to extract content of helm package: ${package_file}" >&2
-                exit 1
-            fi
-            ;;
-
         debian)
             deb_extract_content "$package_file" $([[ "$VERBOSE" == "true" ]] && echo --verbose || true)
             if ! check_pipe_status ; then
@@ -671,10 +639,7 @@ function get_app_version {
     echo "extracting version from $1" >&2
     local app_version
     app_version="$(
-        if [[ "$OS" == "centos" ]] ; then
-            rpm -q --qf '%{VERSION}-%{RELEASE}' -p "$1" | sed 's![.]tis!!g'
-            check_pipe_status || exit 1
-        else
+        if [[ "$OS" == "debian" ]] ; then
             control="$(deb_get_control "$1")" || exit 1
             version="$(echo "$control" | deb_get_field "Version" | sed -r -e 's/^[^:]+:+//')"
             if [[ -z "$version" ]] ; then
@@ -689,7 +654,7 @@ function get_app_version {
 }
 
 # TODO(awang): remove the deprecated image-file option
-OPTS=$(getopt -o h,a:,A:,B:,r:,i:,l:,p: -l help,os:,app:,app-version-file:,app-version:,rpm:,package:,image-record:,image-file:,label:,patch-dependency:,verbose -- "$@")
+OPTS=$(getopt -o h,a:,A:,B:,i:,l:,p: -l help,os:,app:,app-version-file:,app-version:,package:,image-record:,image-file:,label:,patch-dependency:,verbose -- "$@")
 if [ $? -ne 0 ]; then
     usage
     exit 1
@@ -720,10 +685,7 @@ while true; do
             APP_VERSION="$2"
             shift 2
             ;;
-        -r | --rpm | --package)
-            if [[ "$1" == "--rpm" ]] ; then
-                echo "WARNING: option $1 is deprecated, use --package instead" >&2
-            fi
+        --package)
             APP_PACKAGES+=(${2//,/ })
             shift 2
             ;;
@@ -770,8 +732,6 @@ if [ -z "$OS" ] ; then
     if [[ -z "$OS" ]] ; then
         echo "Unable to determine OS, please re-run with \`--os' option" >&2
         exit 1
-    elif [[ "$OS" != "debian" ]] ; then
-        OS="centos"
     fi
 fi
 VALID_OS=1
@@ -810,10 +770,8 @@ function find_python_2_or_3 {
 }
 PYTHON_2_OR_3="$(find_python_2_or_3)" || exit 1
 
-# include SRPM utils
-if [[ "$OS" == "centos" ]] ; then
-    source $BUILD_HELM_CHARTS_DIR/srpm-utils || exit 1
-else
+# include packaging utils
+if [[ "$OS" == "debian" ]] ; then
     source $BUILD_HELM_CHARTS_DIR/deb-utils.sh || exit 1
 fi
 
diff --git a/build-tools/build-img b/build-tools/build-img
deleted file mode 100755
index 686be6a9..00000000
--- a/build-tools/build-img
+++ /dev/null
@@ -1,638 +0,0 @@
-#!/bin/bash
-
-PROGNAME=$(basename "$0")
-FORCE=0
-AUTO_MODE=
-IMG_SIZE=
-BOOTIMAGE_ISO=
-GRAPHICAL_SUFFIX=
-IMG_FILE=
-AUTO_ISO=
-DHCPV6C=yes
-OAM_DEV=ens3
-IPV4_GW_ADDR=
-IPV6_GW_ADDR=
-AWS_COMPATIBLE=0
-declare -A PASSWORDS
-: KVM=
-KVM_OPTS=()
-TEMPFILES_DIR=
-SUDO=0
-GRAPHICAL=0
-TTY_SETTINGS=
-RPM_ADDON_LIST=()
-
-# Print out the help message
-usage() {
-    echo "\
-Usage: $0 OPTIONS...
-Create a QCOW2/QEMU image with StarlingX pre-installed
-
- -f,--force        overwrite output file if it exists
-
- -m,--mode={controller|aio|aio_lowlatency}
-                   create a controller or an all-in-one/low latency system
-                   (default: aio)
-
-    --sudo         Use sudo to mount the ISO, rather than udisks
-
- -s,--size=nnnG    image file size, must end with "G" (default: 500G)
-
- -g,--graphical    create a graphical installation, rather than console
-
- -e,--oam-dev=OAM_DEV
-                   OAM network device (default: ens3)
-
- -4,--ipv4         don't configure IPv6 in the generated image
-
- -w,--ipv4-default-gateway=GW_IPV4_ADDR
-                   Add a default IPv4 route via this gateway address
-
- -W,--ipv6-default-gateway=GW_IPV6_ADDR
-                   Add a default IPv6 route via this gateway address
-
- -p,--password=USER:PASSWORD
-                   Unlock USER account and set its password in the generated
-                   image.
-                   USER must exist -- e.g., root, sysadmin.
-                   This option may be repeated.
-
-                   WARNING: this option is not recommended because the
-                            password will be visible to anyone listing the
-                            processes. Use \`--passwords-from' instead.
-
- -P,--passwords-from=PASSWORD_FILE
-                   Unlock and set passwords of each user account from
-                   PASSWORD_FILE, which must contain one or more lines
-                   of the form
-
-                      USER:PASSWORD
-
-                   USERs must exist -- e.g., root, sysadmin.
-
- -S,--passwords-from-stdin
-                   Same as \`--passwords-from=/dev/stdin'
-
- -i,--iso=BOOTIMAGE_ISO
-                   use this iso file as input, it must have been generated
-                   by build-iso with default options
-                   (default: \$MY_WORKSPACE/export/bootimage.iso)
-
- -o,--output=IMG_FILE
-                   output image file name
-                   Default:
-                      \$MY_WORKSPACE/export/stx_\${MODE}.qcow2)
-                   Default with --graphical:
-                      \$MY_WORKSPACE/export/stx_\${MODE}_graphical.qcow2)
-
- --aws
-                   Prepare an image that can be loaded onto an AWS EC2
-                   instance
- --addon
-                   Specify additional rpms to add to the qcow2 image
-
-ENVIRONMENT
-
- MY_REPO           source repo directory
- MY_WORKSPACE      build workspace directory
- KVM               path to kvm executable (default: auto)
-"
-}
-
-# Delete temporary files
-cleanup() {
-   # QEMU changes terminal settings, restore them before exiting
-   [[ -z $TTY_SETTINGS ]] || stty "$TTY_SETTINGS" <&1
-   # remove temporary files
-   rm -rf "$TEMPFILES_DIR"
-   rm -f "$IMG_FILE.tmp"
-}
-
-# Clean up before exiting due to a signal
-handle_sig() {
-   trap - EXIT
-   cleanup
-   exit 1
-}
-
-# Clean up before normal exit
-handle_exit() {
-  local rv="$?"
-  trap - EXIT
-  cleanup
-  exit $rv
-}
-
-# Print out an error message
-error() {
-    echo "$PROGNAME: error: $*" >&2
-}
-
-# Print out an error message and exit
-die() {
-    error "$*"
-    exit 1
-}
-
-# Print out a command-line error message and exit
-cmdline_error() {
-    if [ "$#" -gt 0 ] ; then
-        error "$*"
-    fi
-    echo "Type \`$0 --help' for more info." >&2
-    exit 2
-}
-
-# Encrypt a password for /etc/passwd
-encrypt_password() {
-    export ARG="$1"
-    python -c '
-import crypt, os, binascii, sys
-salt = binascii.b2a_hex(os.urandom (8)).decode("ascii")
-encrypted = crypt.crypt (os.environ["ARG"], "$5$" + salt + "$")
-print (encrypted)
-' "$1"
-    local status="$?"
-    unset ARG
-    [[ $status -eq 0 ]] || exit 1
-}
-
-# Save username/password to $PASSWORDS
-save_password() {
-    local passwd_str="$1"
-    local error_prefix="$2"
-    if [[ ! $passwd_str =~ : ]] ; then
-        error "${error_prefix}expecting USER:PASSWORD"
-        return 1
-    fi
-    local user="${passwd_str%%:*}"
-    local passwd="${passwd_str#*:}"
-    if [[ -z $user || -z $passwd ]] ; then
-        error "${error_prefix}expecting USER:PASSWORD"
-        return 1
-    fi
-    if [[ $user =~ [^a-zA-Z0-9._-] ]] ; then
-        error "${error_prefix}username must only contain characters [a-zA-Z0-9._-]"
-        return 1
-    fi
-    PASSWORDS[$user]="$passwd"
-    return 0
-}
-
-# Read passwords from file or STDIN
-read_passwords() {
-    local filename="$1"
-    local -i lineno=0
-    local numchar="#"
-    # Open password file or STDIN as file descriptor 3
-    if [[ -z $filename || $filename == - ]] ; then
-        filename=STDIN
-        exec 3<&0 || exit 1
-    else
-        exec 3<"$filename" || exit 1
-    fi
-    while read line <&3 ; do
-        let lineno++
-        # skip empty lines and comments
-        # ${numchar} is "#" to avoid tripping up VI's syntax highlighting
-        if [[ ! $line =~  ^[[:space:]]*(${numchar}.*)?*$ ]] ; then
-            save_password "$line" "$filename:$lineno: " || exit 1
-        fi
-    done
-    # close file descriptor 3
-    exec 3<&-
-}
-
-# Check if an IPv4 address is valid
-is_ipv4_addr() {
-    # make sure we have python
-    python -c 'import socket' || exit 1
-    # parse the address via python
-    python -c 'import socket,sys;socket.inet_aton(sys.argv[1])' "$1" >/dev/null 2>&1
-}
-
-# Check if an IPv6 address is valid
-is_ipv6_addr() {
-    # make sure we have python
-    python -c 'import socket' || exit 1
-    # parse the address via python
-    python -c 'import socket,sys;socket.inet_pton(socket.AF_INET6,sys.argv[1])' "$1" >/dev/null 2>&1
-}
-
-# find QEMU/KVM
-find_kvm() {
-    local kvm
-    if [[ -n "$KVM" ]] ; then
-        kvm=$(which "$KVM")
-        [[ -n $kvm ]] || exit 1
-    else
-        for kvm_basename in qemu-kvm kvm ; do
-            kvm=$(export PATH=$PATH:/usr/bin:/usr/libexec ; which $kvm_basename 2>/dev/null || :)
-            [[ -n $kvm ]] && break || :
-        done
-        [[ -n $kvm ]] || die "unable to find kvm executable"
-    fi
-    KVM="$kvm"
-    if [[ -c /dev/kvm ]] ; then
-        KVM_OPTS+=("-enable-kvm")
-    fi
-}
-
-# Perform setup work for an image to run on AWS
-# Create config files for adding ENA driver module, network scripts, and for
-# regenerating a generic initramfs image
-add_aws_setup(){
-    local ks_addon=$1
-    AWS_OAM_IF=ens5
-    AWS_MGMT_IF=ens6
-    cat >>"$ks_addon" <<_END
-
-# Comment out deprecated virtio by-path rules to avoid duplicate symlinks
-sed -i 's/^\(KERNEL.*disk\/by-path\/virtio\)/#\1/' /usr/lib/udev/rules.d/60-persistent-storage.rules
-
-cat >/etc/modules-load.d/ena.conf <<END
-ena
-END
-
-cat >/etc/dracut.conf.d/add-ena.conf <<END
-add_drivers+=" ena "
-END
-
-cat >/etc/dracut.conf.d/no-hostonly.conf <<END
-hostonly="no"
-END
-
-cat >/etc/sysconfig/network-scripts/ifcfg-${AWS_OAM_IF} <<END
-DEVICE=${AWS_OAM_IF}
-BOOTPROTO=dhcp
-ONBOOT=yes
-TYPE=Ethernet
-USERCTL=yes
-PEERDNS=yes
-DHCPV6C=yes
-DHCPV6C_OPTIONS=-nw
-PERSISTENT_DHCLIENT=yes
-RES_OPTIONS="timeout:2 attempts:5"
-DHCP_ARP_CHECK=no
-END
-
-cat >/etc/sysconfig/network-scripts/ifcfg-${AWS_MGMT_IF} <<END
-DEVICE=${AWS_MGMT_IF}
-BOOTPROTO=dhcp
-ONBOOT=yes
-TYPE=Ethernet
-USERCTL=yes
-PEERDNS=yes
-DHCPV6C=yes
-DHCPV6C_OPTIONS=-nw
-PERSISTENT_DHCLIENT=yes
-RES_OPTIONS="timeout:2 attempts:5"
-DHCP_ARP_CHECK=no
-END
-
-if [ ! -d /var/tmp ]; then
-    mkdir -m 1777 /var/tmp
-fi
-
-KERNEL_VERSION=\$(rpm -q kernel --qf '%{version}-%{release}.%{arch}')
-/sbin/dracut -f /boot/initramfs-\$KERNEL_VERSION.img \$KERNEL_VERSION
-_END
-}
-
-# Process command line
-init() {
-    local temp
-    temp=$(getopt -o hf4w:W:e:p:P:Sm:gs:i:o: --long help,force,ipv4,ipv4-default-gateway:,ipv6-default-gateway:,oam-dev:,password:,passwords-from:,passwords-from-stdin,mode:,graphical,sudo,size:,iso:,output:,aws,addon: -n "$PROGNAME" -- "$@") || cmdline_error
-    eval set -- "$temp"
-    while true ; do
-        case "$1" in
-            -h|--help)
-                usage
-                exit 0
-                ;;
-            -f|--force)
-                FORCE=1
-                shift
-                ;;
-            -4|--ipv4)
-                DHCPV6C=no
-                shift
-                ;;
-            -w|--ipv4-default-gateway)
-                is_ipv4_addr "$2" || cmdline_error "invalid IP address \`$2'"
-                IPV4_GW_ADDR="$2"
-                shift 2
-                ;;
-            -W|--ipv6-default-gateway)
-                is_ipv6_addr "$2" || cmdline_error "invalid IP address \`$2'"
-                IPV6_GW_ADDR="$2"
-                shift 2
-                ;;
-            -e|--oam-dev)
-                OAM_DEV="$2"
-                shift 2
-                ;;
-            -P|--passwords-from)
-                read_passwords "$2"
-                shift 2
-                ;;
-            -S|--passwords-from-stdin)
-                read_passwords -
-                shift
-                ;;
-            -p|--password)
-                save_password "$2" "invalid $1: " || cmdline_error
-                shift 2
-                ;;
-            -m|--mode)
-                [[ "$2" =~ ^(controller|aio|aio_lowlatency)$ ]] || cmdline_error "invalid --mode"
-                AUTO_MODE="$2"
-                shift 2
-                ;;
-            -g|--graphical)
-                GRAPHICAL=1
-                GRAPHICAL_SUFFIX=_graphical
-                shift
-                ;;
-            --sudo)
-                SUDO=1
-                shift
-                ;;
-            -s|--size)
-                [[ $2 =~ ^[0-9]{1,5}G$ ]] || cmdline_error "invalid --size"
-                IMG_SIZE="$2"
-                shift 2
-                ;;
-            -i|--iso)
-                BOOTIMAGE_ISO="$2"
-                shift 2
-                ;;
-            -o|--output)
-                IMG_FILE="$2"
-                shift 2
-                ;;
-            --aws)
-                AWS_COMPATIBLE=1
-                shift
-                ;;
-            --addon)
-                RPM_ADDON_LIST+=("$2")
-                shift 2
-                ;;
-            --)
-                shift
-                break
-                ;;
-            -?*)
-                cmdline_error
-                ;;
-            *)
-                break
-                ;;
-        esac
-    done
-    [[ $# -le 0 ]] || cmdline_error "too many arguments"
-
-    # These are required
-    [[ -n $MY_WORKSPACE ]] || die "MY_WORKSPACE is not set"
-    [[ -n $MY_REPO ]] || die "MY_REPO is not set"
-
-    # Defaults
-    : ${AUTO_MODE:=aio}
-    : ${IMG_SIZE:=500G}
-    : ${BOOTIMAGE_ISO:=$MY_WORKSPACE/export/bootimage.iso}
-    : ${IMG_FILE:=$MY_WORKSPACE/export/stx_${AUTO_MODE}${GRAPHICAL_SUFFIX}.qcow2}
-}
-
-# main
-init "$@"
-
-# make sure we clean up before exiting
-trap handle_sig  INT TERM PIPE HUP
-trap handle_exit EXIT
-
-# make sure update-iso-centos.sh exists
-UPDATE_ISO=$MY_REPO/stx/utilities/utilities/platform-util/scripts/update-iso-centos.sh
-: <"$UPDATE_ISO" || exit 1
-
-# make sure input ISO file exists
-: <"$BOOTIMAGE_ISO" || exit 1
-
-# make sure patch_build.sh exists
-PATCH_BUILD=$MY_REPO/stx/update/extras/scripts/patch_build.sh
-: <"$PATCH_BUILD" || exit 1
-
-# find patch-iso
-which patch-iso >/dev/null || exit 1
-
-# find QEMU/KVM
-find_kvm
-
-# find qemu-img
-which qemu-img >/dev/null || exit 1
-
-# refuse to overwrite existing output file
-if [[ -e "$IMG_FILE" ]] && [[ $FORCE -ne 1 ]] ; then
-    die "output file $IMG_FILE already exist, delete it first or use --force"
-fi
-
-# which menu item to use?
-menu_item=
-case "$AUTO_MODE" in
-    controller)     menu_item=0 ;;
-    aio)            menu_item=2 ;;
-    aio_lowlatency) menu_item=4 ;;
-    *)              die "internal error" ;;
-esac
-
-# create a directory for temporary files
-TEMPFILES_DIR=$(mktemp -d -t build_img.XXXXXXXX) || exit 1
-
-# create an updated iso with the menu item pre-selected
-auto_iso="$TEMPFILES_DIR/bootimage_${AUTO_MODE}${GRAPHICAL_SUFFIX}.iso"
-rm -f "$auto_iso"
-cmd=()
-if [[ $SUDO == 1 ]] ; then
-    cmd+=(sudo)
-fi
-cmd+=("$UPDATE_ISO" -i "$BOOTIMAGE_ISO" -o "$auto_iso" -d "$menu_item" -t 3)
-
-if [[ $AWS_COMPATIBLE == 1 ]] ; then
-    cmd+=(-p rdloaddriver=ena)
-fi
-
-# generate a kickstart add-on
-ks_addon="$TEMPFILES_DIR/ks_addon.sh"
-echo "#### start ks-addon.cfg" >"$ks_addon"
-# configure $OAM_DEV
-cat >>"$ks_addon" <<_END
-# configure $OAM_DEV
-uuid=\$(uuidgen)
-cat >/etc/sysconfig/network-scripts/ifcfg-$OAM_DEV <<END
-UUID=\$uuid
-DEVICE=$OAM_DEV
-NAME=$OAM_DEV
-TYPE=Ethernet
-PROXY_METHOD=none
-BROWSER_ONLY=no
-BOOTPROTO=dhcp
-DEFROUTE=yes
-IPV4_FAILURE_FATAL=no
-IPV6INIT=yes
-IPV6_AUTOCONF=no
-IPV6_DEFROUTE=yes
-IPV6_FAILURE_FATAL=no
-IPV6_ADDR_GEN_MODE=stable-privacy
-ONBOOT=yes
-DHCPV6C=$DHCPV6C
-END
-_END
-
-# Add default routes
-if [[ -n "$IPV4_GW_ADDR" ]] ; then
-    cat >>"$ks_addon" <<_END
-# Add a default IPv4 route
-echo "default via $IPV4_GW_ADDR dev $OAM_DEV metric 1" >/etc/sysconfig/network-scripts/route-$OAM_DEV
-_END
-fi
-if [[ -n "$IPV6_GW_ADDR" ]] ; then
-    cat >>"$ks_addon" <<_END
-# Add a default IPv6 route
-echo "default via $IPV6_GW_ADDR dev $OAM_DEV metric 1" >/etc/sysconfig/network-scripts/route6-$OAM_DEV
-_END
-fi
-
-# Disable cloud-init networking if cloud-init is installed
-cat >>"$ks_addon" <<_END
-if [ -d /etc/cloud/cloud.cfg.d/ ]; then
-    echo "network: {config: disabled}" > /etc/cloud/cloud.cfg.d/99-disable-networking.cfg
-fi
-_END
-
-# Set passwords
-for user in "${!PASSWORDS[@]}" ; do
-    encrypted=$(encrypt_password "${PASSWORDS[$user]}")
-    [[ $? -eq 0 ]] || exit 1
-    cat >>"$ks_addon" <<_END
-# set ${user}'s password
-usermod -e '' -p '$encrypted' '$user' || exit 1
-chage --inactive -1 --maxdays -1 --lastday \$(date '+%Y-%m-%d') '$user' || exit 1
-_END
-done
-
-# Comment-out global_filter in lvm.conf
-# The installer normally sets it to the installer hard drive's bus address,
-# and LVM doesn't come up when booted in different emulation environment.
-cat >>"$ks_addon" <<'_END'
-# Comment-out global_filter in lvm.conf
-sed -r -i 's!^(\s*)global_filter\s*=.*!\1# global_filter = [ "a|.*/|" ]!' /etc/lvm/lvm.conf
-_END
-
-# Change grub parameters to boot to graphical console.
-# The installer sets these to use the serial port when we install
-# in text mode.
-if [[ $GRAPHICAL -eq 1 ]] ; then
-    cat >>"$ks_addon" <<'_END'
-# Boot in graphical mode
-sed -r -i \
-    -e '/^\s*GRUB_SERIAL_COMMAND=/       d' \
-    -e '/^\s*GRUB_TERMINAL(_OUTPUT)?=/   s/=.*/="console"/' \
-    -e '/^\s*GRUB_CMDLINE_LINUX=/        s/\bconsole=ttyS0,\S+/console=tty0/' \
-    /etc/default/grub
-if [ -d /sys/firmware/efi ] ; then
-  grub2-mkconfig -o /boot/efi/EFI/centos/grub.cfg
-else
-  grub2-mkconfig -o /boot/grub2/grub.cfg
-fi
-_END
-fi
-
-# Add necessary setup work for an aws image to the ks_addon script
-if [[ $AWS_COMPATIBLE == 1 ]] ; then
-    add_aws_setup $ks_addon
-fi
-
-echo "#### end ks-addon.cfg" >>"$ks_addon"
-cmd+=(-a "$ks_addon")
-
-# execute update_iso.sh
-echo "${cmd[@]}"
-"${cmd[@]}" || exit 1
-
-# patch the iso if additional rpms are specified
-if [ ${#RPM_ADDON_LIST[@]} -gt 0 ] ; then
-    # Patch build will drop the generated patch file into the current directory.
-    # We want that to be $MY_WORKSPACE.
-    pushd $MY_WORKSPACE
-    patch_file="PATCH.img-addon"
-    patched_iso="$TEMPFILES_DIR/bootimage_${AUTO_MODE}${GRAPHICAL_SUFFIX}_patched.iso"
-    cmd=("$PATCH_BUILD" --id "${patch_file}" --summary "additional packages for qcow2 image" --desc "Adds customizations to qcow2 image" --status "REL" --reboot-required "N")
-    for rpm_addon in "${RPM_ADDON_LIST[@]}"; do
-        cmd+=(--all-nodes "${rpm_addon}")
-    done
-    # create the patch file
-    echo "${cmd[@]}"
-    "${cmd[@]}" || exit 1
-    cmd=(patch-iso -i "$auto_iso" -o "$patched_iso" "${MY_WORKSPACE}/${patch_file}.patch")
-    # execute patch-iso
-    echo "${cmd[@]}"
-    "${cmd[@]}" || exit 1
-    mv ${patched_iso} ${auto_iso}
-    popd
-fi
-
-# create a blank image file
-rm -f "$IMG_FILE.tmp"
-cmd=(qemu-img create "$IMG_FILE.tmp" -f qcow2 "$IMG_SIZE")
-echo "${cmd[@]}"
-"${cmd[@]}" || exit 1
-
-# run the installer in QEMU
-cmd=(
-    "$KVM"
-    "${KVM_OPTS[@]}"
-    -m 8192
-    -drive file="$IMG_FILE.tmp",if=ide
-    -cdrom "$auto_iso"
-    -boot d
-    -no-reboot
-    -nographic
-    -smp 4
-)
-# if STDOUT is a terminal, save current terminal settings
-# so that we can restore them later
-if [[ -t 1 ]] ; then
-    TTY_SETTINGS=$(stty -g <&1)
-# otherwise, disable QEMU's terminal features
-else
-    cmd+=(-serial file:/dev/stdout)
-fi
-# execute qemu
-echo "${cmd[@]}"
-"${cmd[@]}" 2>&1 | tee $TEMPFILES_DIR/kvm.log
-if [[ ${PIPESTATUS[0]} -ne 0 || ${PIPESTATUS[1]} -ne 0 ]] ; then
-    die "qemu: installation failed"
-fi
-
-# QEMU exits with status=0 even when killed by a signal. Check its output
-# for a known message to detect this case
-if tail "$TEMPFILES_DIR/kvm.log" | grep -q -E "(qemu|kvm).*: terminating on signal" ; then
-    die "qemu terminated by a signal"
-fi
-
-# rename tmp image file to the final name
-mv -f "$IMG_FILE.tmp" "$IMG_FILE" || exit 1
-
-# done
-echo "
-Created $IMG_FILE
-
-To use this image, type:
-"
-if [[ $GRAPHICAL -eq 1 ]] ; then
-    echo "    $KVM ${KVM_OPTS[@]} -m 16384 -drive file=$IMG_FILE,if=ide -boot c -smp 4"
-    echo
-    echo "(requires a graphical console)"
-else
-    echo "    $KVM ${KVM_OPTS[@]} -m 16384 -drive file=$IMG_FILE,if=ide -boot c -nographic -smp 4"
-fi
diff --git a/build-tools/build-iso b/build-tools/build-iso
deleted file mode 100755
index fc366699..00000000
--- a/build-tools/build-iso
+++ /dev/null
@@ -1,853 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-# Build the export/bootimage.iso file
-#
-# This script uses environment variables to determine the source of
-# packages, and bundles the packages into a bootable .iso
-#
-# It starts by building a basic "vanilla CentOS" ISO, and then adds our
-# packages to it.
-
-BUILD_ISO_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-source "${BUILD_ISO_DIR}/image-utils.sh"
-source "${BUILD_ISO_DIR}/git-utils.sh"
-
-# Set REPOQUERY, REPOQUERY_SUB_COMMAND, REPOQUERY_RESOLVE and
-# REPOQUERY_WHATPROVIDES_DELIM for our build environment.
-source "${BUILD_ISO_DIR}/pkg-manager-utils.sh"
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   build-iso [--auto <mode>] [--file <filename>] "
-    echo "             [--device <device>] [--skip-sign]"
-    echo "             [--sudo|udev]"
-    echo "   --file <bootimage.iso> destination ISO file"
-    echo "   --auto <controller|cpe> Modify kickstart to auto-install controller or cpe"
-    echo "                           mode"
-    echo "   --device <sda> Use a different boot/rootds device (default is sda)"
-    echo "   --skip-sign do not add file signature to RPMs"
-    echo "   --sudo Use \"sudo\" command to access EFI image filesystem (default)"
-    echo "   --udev Use udev to access EFI image filesystem"
-    echo ""
-    echo "   Note that environment variable BUILD_ISO_USE_UDEV=1 will have the same effect"
-    echo "     as the --udev option"
-    echo ""
-}
-
-MY_YUM_CONF=""
-STD_REPO_ID="local-std"
-RT_REPO_ID="local-rt"
-LOWER_LAYER_STD_REPO_ID=""
-LOWER_LAYER_RT_REPO_ID=""
-
-NPROCS=$(nproc)
-
-export MOCK=/usr/bin/mock
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-   CREATEREPO="createrepo"
-fi
-
-# TEMPORARY: Check for isohybrid now to give a warning about installing pkg
-if [ ! -f /usr/bin/isohybrid ]; then
-    echo "Missing required utility: /usr/bin/isohybrid"
-    echo "Installation of syslinux is required:"
-    echo "    sudo yum install -y syslinux"
-    exit 1
-fi
-
-function install_pkg_list {
-   local PKGLIST=$1
-   if [ "x$PKGLIST" == "x" ]; then
-       return 1
-   fi
-
-   OLD_PWD=$PWD
-
-   echo "Installing packages listed in $PKGLIST and dependancies"
-   \rm -f $OUTPUT_DIR/dist/report_deps.txt
-   $CREATEREPO $CGCS_REPO_DIR
-   $CREATEREPO $CGCS_RT_REPO_DIR
-
-   \cp -v $MY_YUM_CONF $OUTPUT_DIR
-
-   \cd $OUTPUT_DIST_DIR/isolinux/Packages
-   $INTERNAL_REPO_ROOT/build-tools/build_iso/cgts_deps.sh --deps=$PKGLIST
-
-   if [ $? -ne 0 ]
-   then
-      echo "Could not install dependencies"
-      exit 1
-   fi
- 
-   # clean up
-   echo "Removing local-std yum repo  $CGCS_REPO_DIR/repodata"
-   echo "Removing local-rt yum repo   $CGCS_RT_REPO_DIR/repodata"
-
-   \cd $OLD_PWD
-}
-
-# Generate the report of where all packages come from
-function make_report {
-   local PKGLISTFILES=$@
-   if [ "x$PKGLISTFILES" == "x" ]; then
-       return 1
-   fi
-   echo "MAKING $REPORT_FILE"
-   echo "-----------------" >> $REPORT_FILE
-
-   echo "ISO REPORT" > $REPORT_FILE
-   date >>  $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
-
-   echo " " >> $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
-   echo "EXPLICIT INCLUDES" >> $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
-   for PKGLIST in $PKGLISTFILES; do
-      while read PKG; do
-         PKG=`echo $PKG | sed "s/#.*//"`;
-         if [ "${PKG}x" != "x" ]; then
-            echo $PKG  >> $REPORT_FILE
-         fi
-      done < $PKGLIST
-   done
-
-   echo " " >> $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
-   echo " PACKAGES        " >> $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
-   cat $BUILT_REPORT | sort | uniq >> $REPORT_FILE
-
-   echo " " >> $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
-   echo " WARNINGS        " >> $REPORT_FILE
-   echo "-----------------" >> $REPORT_FILE
- 
-   # Note that the warnings file may have multiple lines for the same
-   # missing dependency.  A sort | uniq solves this so we don't duplicate
-   # warnings
-   cat $WARNINGS_REPORT | sort | uniq >> $REPORT_FILE
-
-   echo "ISO REPORT: $REPORT_FILE"
-}
-
-function init_vars {
-   #####################################
-   # Input definitions
-
-   # Where all CentOS packages live
-   # Where essential CentOS (minimal install) packages live
-   INTERNAL_REPO_ROOT=
-   STX_DIR=
-
-   # Where BSP files live
-   export BSP_FILES_PATH=
-
-   # Where our own packages live
-   CGCS_REPO_DIR=$MY_WORKSPACE/std/rpmbuild/RPMS
-   CGCS_RT_REPO_DIR=$MY_WORKSPACE/rt/rpmbuild/RPMS
-
-   MY_YUM_CONF=$(create-yum-conf)
-   if [ $? -ne 0 ]; then
-      echo "ERROR: create-yum-conf failed"
-      exit 1
-   fi
-
-   # LOWER_LAYER_STD_REPO_ID should be something like StxCentos7Distro or StxCentos8Distro
-   LOWER_LAYER_STD_REPO_ID=$(grep '\[StxCentos.*Distro\]' ${MY_YUM_CONF} | sed -e 's/^\[//' -e 's/\].*//')
-   LOWER_LAYER_RT_REPO_ID=$(grep '\[StxCentos.*Distro-rt\]' ${MY_YUM_CONF} | sed -e 's/^\[//' -e 's/\].*//')
-
-   DISTRO_REPO_DIR=$(for d in $(grep baseurl $MY_YUM_CONF | grep file: | awk -F : '{print $2}' | sed 's:///:/:g'); do if [ -d $d/images ]; then echo $d ;fi; done)
-
-   #####################################
-   # Output definitons
-
-   # where to put stuff (curent dir unless MY_WORKSPACE defined)
-   OUTPUT_DIR="$PWD/export" 
-   if [ ! -z "$MY_WORKSPACE" ] && [ -d "$MY_WORKSPACE" ] ; then
-       OUTPUT_DIR="$MY_WORKSPACE/export"
-       CGCS_REPO_DIR="$MY_WORKSPACE/std/rpmbuild/RPMS"
-       CGCS_RT_REPO_DIR="$MY_WORKSPACE/rt/rpmbuild/RPMS"
-   fi
-
-   # Directory in which to populate files to be distributed
-   if [ $CUMULUS -eq 0 ]; then
-      OUTPUT_DIST_DIR=$OUTPUT_DIR/dist
-   else
-      OUTPUT_DIST_DIR=$OUTPUT_DIR/dist-cumulus
-   fi
-
-   # Package disc image
-   OUTPUT_FILE=$OUTPUT_DIR/$DEST_FILE
-
-   # Generate an error if the output file is below this threshold
-   MINIMUM_EXPECTED_SIZE=500000000
-
-   # For backward compatibility.  Old repo location or new?
-   CENTOS_REPO=${MY_REPO}/centos-repo
-   if [ ! -d ${CENTOS_REPO} ]; then
-      CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-      if [ ! -d ${CENTOS_REPO} ]; then
-         CENTOS_REPO=${MY_REPO}/centos-repo
-      fi
-   fi
-
-   # report variables
-   REPORT_FILE=$OUTPUT_DIR/report.txt
-   BUILT_REPORT=$OUTPUT_DIR/local.txt
-   CLOUD_REPORT=$OUTPUT_DIR/cloud.txt
-   CLOUD_COMMON_REPORT=$OUTPUT_DIR/cloudcommon.txt
-   CENTOS_REPORT=$OUTPUT_DIR/centos.txt
-   EPEL_REPORT=$OUTPUT_DIR/epel.txt
-   WARNINGS_REPORT=$OUTPUT_DIR/warnings.txt
-
-   \rm -f $REPORT_FILE
-   \rm -f $BUILT_REPORT
-   \rm -f $CLOUD_REPORT
-   \rm -f $CLOUD_COMMON_REPORT
-   \rm -f $CENTOS_REPORT
-   \rm -f $WARNINGS_REPORT
-}
-
-# check input variables
-function check_vars {
-   # Where to store data
-   printf "Finding cgcs-root\n"
-   printf "  Checking \$MY_REPO (value \"$MY_REPO\")\n"
-
-   if [ ! -z "$MY_REPO" ] && [ -d "$MY_REPO" ] ; then
-      INTERNAL_REPO_ROOT=$MY_REPO
-      printf "  Found!\n"
-   fi
-
-   if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  No joy -- checking \$MY_REPO_ROOT_DIR (value \"$MY_REPO_ROOT_DIR\")\n"
-      if [ ! -z "$MY_REPO_ROOT_DIR" ] && [ -d "$MY_REPO_ROOT_DIR/cgcs-root" ] ; then
-          INTERNAL_REPO_ROOT=$MY_REPO_ROOT_DIR/cgcs-root
-          printf "  Found!\n"
-      fi
-   fi
-
-   if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  No joy -- checking for \$MY_WORKSPACE/cgcs-root\n"
-      if [ -d "$MY_WORKSPACE/cgcs-root" ] ; then
-          INTERNAL_REPO_ROOT=$MY_WORKSPACE/cgcs-root
-          printf "  Found!\n"
-      fi
-   fi
-
-   if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  Error -- could not locate cgcs-root repo.\n"
-      exit 1
-   fi
-
-   if [ ! -z "${CENTOS_REPO}" ] && [ ! -d ${CENTOS_REPO} ]; then
-      echo "  Error -- directory '${CENTOS_REPO}' not found."
-      exit 1
-   fi
-
-   STX_DIR=$INTERNAL_REPO_ROOT/stx
-
-   printf "\nChecking that we can access $DISTRO_REPO_DIR\n"
-   if [ ! -d "$DISTRO_REPO_DIR" ] ; then
-      printf "  Error -- could not access $DISTRO_REPO_DIR\n"
-      exit 1
-   fi
-
-   if [ ! -e "$DISTRO_REPO_DIR/repodata" ] ; then
-      printf "  Error -- $DISTRO_REPO_DIR is there, but does not seem sane\n"
-   fi
-
-   printf "\nOkay, input looks fine...\n\n"
-   printf "Creating output directory $OUTPUT_DIST_DIR\n"
-   if [ $CLEAN_FLAG -eq 1 ]; then
-      echo "  Cleaning..."
-      if [ -e $OUTPUT_DIST_DIR ] ; then
-         chmod -R a+w $OUTPUT_DIST_DIR
-         \rm -rf $OUTPUT_DIST_DIR
-      fi
-      if [ -e $OUTPUT_DIST_DIR ] ; then
-         printf "Error: could not remove old $OUTPUT_DIST_DIR\n"
-         exit 1
-      fi
-   fi
-
-   \mkdir -p $OUTPUT_DIST_DIR
-   if [ ! -d $OUTPUT_DIST_DIR ] ; then
-      printf "Error: could not create $OUTPUT_DIST_DIR\n"
-      exit 1
-   fi
-
-   RELEASE_INFO="$(get_release_info)"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to find a release info file."
-      exit 1
-   fi
-
-   export PLATFORM_RELEASE=$(source "$RELEASE_INFO" && echo $PLATFORM_RELEASE)
-
-   # Where BSP files live
-   export BSP_FILES_PATH="$(get_bsp_dir)"
-   echo "  Done"
-   echo ""
-}
-
-function init_output_dir {
-   echo "Creating base output directory in $OUTPUT_DIST_DIR"
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/images
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/ks
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/LiveOS
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/Packages
-   \mkdir -p $OUTPUT_DIST_DIR/utils
-
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/EFI
-   # This directory will contains files required for the PXE network installer
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/pxeboot
-
-   echo "  Preparing package lists"
-   image_inc_list iso std centos > "${PKGLIST_STX}"
-   image_inc_list iso dev centos > "${PKGLIST_DEV}"
-   image_inc_list iso layer centos ${LAYER} > "${PKGLIST_THIS_LAYER}"
-
-   echo "  Copying base files"
-
-   # Generate .discinfo file
-   date +%s.%N > $OUTPUT_DIST_DIR/isolinux/.discinfo
-   echo $PLATFORM_RELEASE >> $OUTPUT_DIST_DIR/isolinux/.discinfo
-   echo "x86_64" >> $OUTPUT_DIST_DIR/isolinux/.discinfo
- 
-   \cp -L -ru $DISTRO_REPO_DIR/isolinux/* $OUTPUT_DIST_DIR/isolinux/
-   \cp -L -ru $DISTRO_REPO_DIR/images/pxeboot $OUTPUT_DIST_DIR/isolinux/images/
-
-   echo "  Installing startup files"
-
-   \cp -L "$BSP_FILES_PATH/centos.syslinux.cfg" "$OUTPUT_DIST_DIR/isolinux/syslinux.cfg"
-   \cp -L "$BSP_FILES_PATH/centos.syslinux.cfg" "$OUTPUT_DIST_DIR/isolinux/isolinux.cfg"
-   sed -i 's/wr_usb_boot/oe_iso_boot/' $OUTPUT_DIST_DIR/isolinux/isolinux.cfg
-
-   # Modify the isolinux.cfg to auto install if requested
-   # Option 0 is Controller(serial). Option 2 is CPE serial.
-   if [ "$AUTO_INSTALL" == "controller" ] ; then
-       echo "Modifying ISO to auto-install controller load"
-       perl -p -i -e 's/timeout 0/timeout 1\ndefault 0/'  $OUTPUT_DIST_DIR/isolinux/isolinux.cfg
-   elif [ "$AUTO_INSTALL" == "cpe" ] ; then
-       echo "Modifying ISO to auto-install CPE (combined load)"
-       perl -p -i -e 's/timeout 0/timeout 1\ndefault 2/'  $OUTPUT_DIST_DIR/isolinux/isolinux.cfg
-   fi
-
-   # Modify the device if requested
-   if [ ! -z "$DEVICE" ] ; then
-       echo "Modifying ISO to use device $DEVICE"
-       perl -p -i -e "s/device=sda/device=${DEVICE}/g"  $OUTPUT_DIST_DIR/isolinux/isolinux.cfg
-   fi
-
-   # Copy UEFI files
-   \cp -L -ru $DISTRO_REPO_DIR/EFI/* $OUTPUT_DIST_DIR/isolinux/EFI/
-   \cp -L "$BSP_FILES_PATH/grub.cfg" "$OUTPUT_DIST_DIR/isolinux/EFI/BOOT/grub.cfg"
-   \cp -L "$BSP_FILES_PATH/pxeboot_grub.cfg" "$OUTPUT_DIST_DIR/isolinux/pxeboot/pxeboot_grub.cfg"
-
-   # Update the efiboot.img (See https://wiki.archlinux.org/index.php/Remastering_the_Install_ISO)
-   # We need to mount the image file, replace the grub.cfg file with the StarlingX one, and unmount.
-   # Script update-efiboot-image will do this. If there is not loop device on the build machine
-   # then this script must be executed manually prior.
-
-   if [ ! -e "/dev/loop-control"  -a ! -f "$OUTPUT_DIR/efiboot.img" ]; then
-      CMD="export PROJECT=$PROJECT; \
-           export SRC_BUILD_ENVIRONMENT=$SRC_BUILD_ENVIRONMENT; \
-           export MY_BUILD_ENVIRONMENT=$MY_BUILD_ENVIRONMENT; \
-           export MY_BUILD_ENVIRONMENT_FILE=$MY_BUILD_ENVIRONMENT_FILE; \
-           export MY_BUILD_DIR=$MY_BUILD_DIR; \
-           export MY_WORKSPACE=$MY_WORKSPACE; \
-           export MY_REPO=$MY_REPO; \
-           export LAYER=$LAYER; \
-           export MY_BUILD_CFG=$MY_BUILD_CFG; \
-           export MY_MOCK_ROOT=$MY_MOCK_ROOT; \
-           export PATH=$MY_REPO/build-tools:\$PATH; \
-           export BUILD_ISO_USE_UDEV=$BUILD_ISO_USE_UDEV; \
-           export BSP_FILES_PATH=$BSP_FILES_PATH; \
-           update-efiboot-image"
-      echo $CMD
-
-      if [ "$HOSTNAME" == "yow-cgts3-centos7" ]; then
-         echo "Attempting to run update-efiboot-image on yow-cgts3-lx"
-         ssh -o StrictHostKeyChecking=no yow-cgts3-lx "$CMD"
-         if [ $? -ne 0 ]; then
-            echo "Failed to run update-efiboot-image on yow-cgts3-lx"
-         fi
-      fi
-
-      if [ "$HOSTNAME" == "yow-cgts2-centos7" ]; then
-         echo "Attempting to run update-efiboot-image on yow-cgts2-lx"
-         ssh -o StrictHostKeyChecking=no yow-cgts2-lx "$CMD"
-         if [ $? -ne 0 ]; then
-            echo "Failed to run update-efiboot-image on yow-cgts2-lx"
-         fi
-      fi
-   fi
-
-   if [ ! -e "/dev/loop-control"  -a ! -f "$OUTPUT_DIR/efiboot.img" ]; then
-      printf "\n**************************************************************************************************** \n"
-      printf "No loop device on this machine. Please ensure $OUTPUT_DIR/efiboot.img \n"
-      printf "exist prior to executing build-iso by.  It can be created by running \n"
-      printf "   $INTERNAL_REPO_ROOT/build-tools/update-efiboot-image \n"
-      printf "on a machine that does support a loop device.  Please ensure all standard \n"
-      printf "build environment variables are defined (e.g. MY_REPO, MY_WORKSPACE, etc.). \n"
-      printf " \n"
-      printf "e.g. If building on yow-cgts3-centos7, you'll want to run the script on \n"
-      printf "     yow-cgts3-lx which shares the same file system, but supports loop devices \n"
-      printf "****************************************************************************************************** \n"
-      exit 1
-   fi
-
-   if [ -f "$OUTPUT_DIR/efiboot.img" ]; then
-
-      # The script update-efiboot-image was run outside the build-iso script, do nothing.
-      printf "  The image file $OUTPUT_DIR/efiboot.img already exists\n"
-   else
-      printf "  The image file $OUTPUT_DIR/efiboot.img does not exist \n"
-      if [ ! -f "$INTERNAL_REPO_ROOT/build-tools/update-efiboot-image" ]; then
-          printf "*** Error: script update-efiboot-image does not exist *** \n"
-          exit 1
-      fi
-
-      # Run the script
-      BUILD_ISO_USE_UDEV=$BUILD_ISO_USE_UDEV $INTERNAL_REPO_ROOT/build-tools/update-efiboot-image
-      RET=$?
-      if [ $RET != 0 ]; then
-          printf "*** Error: update-efiboot-image script returned failure $RET *** \n"
-          exit 1
-      fi
-
-   fi
-
-   \cp -L $OUTPUT_DIR/efiboot.img $OUTPUT_DIST_DIR/isolinux/images/
-   \rm -f $OUTPUT_DIR/efiboot.img
-
-   # Copy and set up pxeboot setup files
-   \cp "$BSP_FILES_PATH/pxeboot_setup.sh" "$OUTPUT_DIST_DIR/isolinux/pxeboot_setup.sh"
-   \cp "$BSP_FILES_PATH/pxeboot.cfg" "$OUTPUT_DIST_DIR/isolinux/pxeboot/pxeboot.cfg"
-   chmod +x $OUTPUT_DIST_DIR/isolinux/pxeboot_setup.sh
-
-   \rm -f $OUTPUT_DIST_DIR/comps.xml
-   \cp -L $INTERNAL_REPO_ROOT/build-tools/build_iso/comps.xml.gz $OUTPUT_DIST_DIR/
-   gunzip $OUTPUT_DIST_DIR/comps.xml.gz
-
-   TMP_DIR=$MY_WORKSPACE/tmp
-   \mkdir -p $TMP_DIR
-   TMPDIR=$TMP_DIR yum clean all -c $MY_YUM_CONF
-   \rm -rf $TMP_DIR/yum-$USER-*
-   echo "  Done"
-   echo ""
-}
-
-function package_content_checksum {
-    local p=$1
-    local md5
-    local r
-    r=$(basename $p)
-    md5=$( ( rpm2cpio $p;
-             rpm -q --info -p $p;
-             rpm -q --dump -p $p;
-             rpm -q --scripts -p $p ) | md5sum | cut -d ' ' -f 1)
-    echo "$r $md5"
-}
-
-function final_touches {
-   OLD_PWD=$PWD
-
-   # Update the comps.xml
-   if [ ! -f $OUTPUT_DIST_DIR/comps.xml.bak ]; then
-      \cp $OUTPUT_DIST_DIR/comps.xml $OUTPUT_DIST_DIR/comps.xml.bak
-   fi
-
-   local EXTRA_ARGS=""
-   if [ "x${RELEASE_BUILD}" == "x" ]; then
-      EXTRA_ARGS="--pkglist '${PKGLIST_DEV}'"
-   fi
-
-   for PKGLIST_LOWER_LAYER in ${PKGLIST_LOWER_LAYER_LIST}; do
-       EXTRA_ARGS+=" --pkglist ${PKGLIST_LOWER_LAYER}"
-   done
-
-   python "$BSP_FILES_PATH/platform_comps.py" \
-      --groups "$OUTPUT_DIST_DIR/comps.xml" \
-      --pkglist "${PKGLIST_MINIMAL}" \
-      --pkglist "${PKGLIST_STX}" \
-      --pkglist "${PKGLIST_THIS_LAYER}" \
-      ${EXTRA_ARGS}
-   if [ $? -ne 0 ]; then
-      echo "Failed to update comps.xml"
-      exit 1
-   fi
-
-   # create the repo
-   \cd $OUTPUT_DIST_DIR/isolinux
-   $CREATEREPO -q -g ../comps.xml .
-
-   # Create package_checksums
-   printf "creating package_checksums file\n"
-   for r in $(ls Packages/*rpm); do
-      package_content_checksum $r
-   done > package_checksums
-
-   # build the ISO
-   printf "Building image $OUTPUT_FILE\n"
-   \cd $OUTPUT_DIST_DIR
-   chmod 664 isolinux/isolinux.bin
-   mkisofs -o $OUTPUT_FILE \
-      -R -D -A 'oe_iso_boot' -V 'oe_iso_boot' \
-      -quiet \
-      -b isolinux.bin -c boot.cat -no-emul-boot \
-      -boot-load-size 4 -boot-info-table \
-      -eltorito-alt-boot \
-      -e images/efiboot.img \
-            -no-emul-boot \
-      isolinux/ 
-
-   isohybrid --uefi $OUTPUT_FILE
-   implantisomd5 $OUTPUT_FILE
-
-   \cd $OLD_PWD
-}
-
-function extract_pkg_from_local_repo {
-   local pkgname=$1
-   local pkg_mgr_conf=$2
-   shift 2
-
-   local repoid=""
-   local repoid_arg=""
-
-   for repoid in $@; do
-      repoid_arg+=" --repoid=${repoid}"
-   done
-
-   echo "TMPDIR=$TMP_DIR"\
-        "${REPOQUERY} --config=${pkg_mgr_conf} ${repoid_arg}"\
-        "${REPOQUERY_SUB_COMMAND} --location"\
-        "--arch=noarch,x86_64 -q ${pkgname}"
-   local pkgfile=$(TMPDIR=$TMP_DIR \
-                 ${REPOQUERY} --config=${pkg_mgr_conf} ${repoid_arg} \
-                 ${REPOQUERY_SUB_COMMAND} --location \
-                 --arch=noarch,x86_64 -q ${pkgname})
-   if [ -z "${pkgfile}" ]; then
-      echo "Could not find package $pkgname in $@"
-      exit 1
-   fi
-
-   rpm2cpio ${pkgfile/file://} | cpio -idmv
-   if [ $? -ne 0 ]; then
-      echo "Failed to extract files from ${pkgfile/file://}"
-      exit 1
-   fi
-}
-
-function extract_installer_files {
-   # Changes to copied files here must also be reflected in patch-iso
-
-   PKGDIR=$OUTPUT_DIST_DIR/isolinux/Packages
-
-   (
-   \cd $OUTPUT_DIR
-   \rm -rf kickstarts extra_cfgs kickstart.work
-   \mkdir kickstarts extra_cfgs kickstart.work
-
-   echo "Retrieving kickstarts..."
-
-   \cd kickstart.work
-
-   echo "MY_YUM_CONF=${MY_YUM_CONF}"
-   cat ${MY_YUM_CONF}
-   extract_pkg_from_local_repo platform-kickstarts ${MY_YUM_CONF} ${STD_REPO_ID} ${LOWER_LAYER_STD_REPO_ID}
-   extract_pkg_from_local_repo platform-kickstarts-pxeboot ${MY_YUM_CONF} ${STD_REPO_ID} ${LOWER_LAYER_STD_REPO_ID}
-   extract_pkg_from_local_repo platform-kickstarts-extracfgs ${MY_YUM_CONF} ${STD_REPO_ID} ${LOWER_LAYER_STD_REPO_ID}
-
-   \cp --preserve=all var/www/pages/feed/rel-*/*.cfg pxeboot/*.cfg ../kickstarts/ &&
-   \cp --preserve=all extra_cfgs/*.cfg ../extra_cfgs/
-   if [ $? -ne 0 ]; then
-      echo "Failed to copy extracted kickstarts"
-      exit 1
-   fi
-
-   \cd ..
-
-   # Copy kickstarts to ISO
-   \cp --preserve=all kickstarts/controller_ks.cfg $OUTPUT_DIST_DIR/isolinux/ks.cfg
-   # Modify the kickstart to shutdown instead of reboot if doing an auto install
-   if [ ! -z "$AUTO_INSTALL" ] ; then
-      sed -i 's/^reboot --eject/shutdown/' $OUTPUT_DIST_DIR/isolinux/ks.cfg
-   fi
-
-   \mv kickstarts/pxeboot* $OUTPUT_DIST_DIR/isolinux/pxeboot/
-   \cp --preserve=all kickstarts/* $OUTPUT_DIST_DIR/isolinux
-
-   # Update OAM interface for cumulus auto install
-   if [ $CUMULUS -eq 1 ]; then
-       # Cumulus wants tty1
-       perl -p -i -e 's/console=tty0/console=tty1/'  $OUTPUT_DIST_DIR/isolinux/isolinux.cfg
-
-       # CUMULUS setup scripts specify ens3 for OAM
-       OAM_IFNAME=ens3
-
-       cat <<EOM >> $OUTPUT_DIST_DIR/isolinux/ks.cfg
-%post
-#For cumulus tis on tis automated install
-cat << EOF > /etc/sysconfig/network-scripts/ifcfg-${OAM_IFNAME}
-IPADDR=10.10.10.3
-NETMASK=255.255.255.0
-BOOTPROTO=static
-ONBOOT=yes
-DEVICE=${OAM_IFNAME}
-MTU=1500
-GATEWAY=10.10.10.1
-EOF
-%end
-EOM
-   fi
-
-   # For PXE boot network installer
-
-   echo ${OUTPUT_DIST_DIR}/isolinux/Packages
-
-   local WORKDIR=pxe-network-installer.content
-   local ORIG_PWD=$PWD
-
-   \rm -rf $WORKDIR
-   \mkdir $WORKDIR
-   \cd $WORKDIR
-
-   extract_pkg_from_local_repo pxe-network-installer ${MY_YUM_CONF} ${STD_REPO_ID} ${LOWER_LAYER_STD_REPO_ID}
-   extract_pkg_from_local_repo grub2-efi-x64-pxeboot ${MY_YUM_CONF} ${STD_REPO_ID} ${LOWER_LAYER_STD_REPO_ID}
-   extract_pkg_from_local_repo grub2-efi-x64-modules ${MY_YUM_CONF} ${STD_REPO_ID} ${LOWER_LAYER_STD_REPO_ID}
-
-   \mkdir -p $OUTPUT_DIST_DIR/isolinux/pxeboot/EFI/centos/x86_64-efi
-
-   \cp --preserve=all var/pxeboot/pxelinux.0 var/pxeboot/menu.c32 var/pxeboot/chain.c32 $OUTPUT_DIST_DIR/isolinux/pxeboot &&
-   \cp --preserve=all usr/lib/grub/x86_64-efi/* $OUTPUT_DIST_DIR/isolinux/pxeboot/EFI/centos/x86_64-efi/ &&
-   \cp --preserve=all var/pxeboot/EFI/grubx64.efi $OUTPUT_DIST_DIR/isolinux/pxeboot/EFI/
-   if [ $? -ne 0 ]; then
-      echo "Error: Could not copy all files from installer"
-      exit 1
-   fi
- 
-   \cp --preserve=all var/www/pages/feed/rel-*/LiveOS/squashfs.img $OUTPUT_DIST_DIR/isolinux/LiveOS
-   if [ $? -ne 0 ]; then
-      echo "Error: Could not copy squashfs from LiveOS"
-      exit 1
-   fi
-
-
-   # Replace vmlinuz and initrd.img with our own pre-built ones
-   \rm -f \
-      $OUTPUT_DIST_DIR/isolinux/vmlinuz \
-      $OUTPUT_DIST_DIR/isolinux/images/pxeboot/vmlinuz \
-      $OUTPUT_DIST_DIR/isolinux/initrd.img \
-      $OUTPUT_DIST_DIR/isolinux/images/pxeboot/initrd.img
-   \cp --preserve=all var/pxeboot/rel-*/installer-bzImage_1.0 \
-      $OUTPUT_DIST_DIR/isolinux/vmlinuz &&
-   \cp --preserve=all var/pxeboot/rel-*/installer-bzImage_1.0 \
-      $OUTPUT_DIST_DIR/isolinux/images/pxeboot/vmlinuz &&
-   \cp --preserve=all var/pxeboot/rel-*/installer-intel-x86-64-initrd_1.0 \
-      $OUTPUT_DIST_DIR/isolinux/initrd.img &&
-   \cp --preserve=all var/pxeboot/rel-*/installer-intel-x86-64-initrd_1.0 \
-      $OUTPUT_DIST_DIR/isolinux/images/pxeboot/initrd.img
-
-   if [ $? -ne 0 ]; then
-      echo "Error: Failed to copy installer images"
-      exit 1
-   fi
-
-   \cd $ORIG_PWD
-   \rm -rf $WORKDIR
-   )
-   if [ $? -ne 0 ]; then
-      exit 1
-   fi
-}
-
-function setup_upgrades_files {
-   # Changes to copied files here must also be reflected in patch-iso
-
-   # Copy the upgrade files
-   UPGRADES_DIR="$OUTPUT_DIST_DIR/isolinux/upgrades"
-   \rm -rf $UPGRADES_DIR
-   \mkdir -p $UPGRADES_DIR
-   \cp $BSP_FILES_PATH/upgrades/* $UPGRADES_DIR
-   sed -i "s/xxxSW_VERSIONxxx/${PLATFORM_RELEASE}/g" $UPGRADES_DIR/metadata.xml
-   chmod +x $UPGRADES_DIR/*.sh
-   # Write the version out (used in upgrade scripts - this is the same as SW_VERSION)
-   echo "VERSION=$PLATFORM_RELEASE" > $UPGRADES_DIR/version
-}
-
-function sign_iso {
-    # Sign the .iso with the developer private key
-    # Sigining with the formal key is only to be done for customer release
-    # builds
-    local isofilename=$(basename $OUTPUT_DIR/$DEST_FILE)
-    local isofilenoext="${isofilename%.*}"
-    openssl dgst -sha256 -sign ${MY_REPO}/build-tools/signing/dev-private-key.pem -binary -out $OUTPUT_DIR/$isofilenoext.sig $OUTPUT_DIR/$DEST_FILE
-}
-
-#############################################
-# Main code
-#############################################
-
-# Check args
-HELP=0
-CLEAN_FLAG=1 # TODO -- doesn't yet work without --clean
-DEST_FILE=bootimage.iso
-AUTO_FLAG=0
-AUTO_INSTALL=""
-CUMULUS=0
-SIGN_RPM_FILES=1
-DEVICE=""
-if [ -z "$BUILD_ISO_USE_UDEV" ]; then
-    BUILD_ISO_USE_UDEV=0
-fi
-
-# read the options
-TEMP=`getopt -o hf:a:d: --long help,file:,auto:,device:,cumulus,clean,skip-sign,sudo,udev -n 'test.sh' -- "$@"`
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        -h|--help) HELP=1 ; shift ;;
-        --clean) CLEAN_FLAG=1 ; shift ;;
-        --skip-sign) SIGN_RPM_FILES=0 ; shift ;;
-        --cumulus) CUMULUS=1 ; shift ;;
-        -f | --file) DEST_FILE="$2"; shift; shift ;;
-        -d | --device) DEVICE="$2"; shift; shift ;;
-        -a | --auto) AUTO_FLAG=1; AUTO_INSTALL="$2"; shift; shift ;;
-        --sudo) BUILD_ISO_USE_UDEV=0 ; shift ;;
-        --udev) BUILD_ISO_USE_UDEV=1 ; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error!" ; exit 1 ;;
-    esac
-done
-
-if [ $AUTO_FLAG -eq 1 ]; then
-    if [[ "$AUTO_INSTALL" != "controller" && "$AUTO_INSTALL" != "cpe" ]] ; then
-            echo "Unsupported --auto value: $AUTO_INSTALL"
-            exit 1
-    fi
-fi
-
-if [ $HELP -eq 1 ]; then
-   usage
-   exit 0
-fi
-
-
-(
-printf "\n*************************\n"
-printf   "Create StarlingX/CentOS Boot CD\n"
-printf   "*************************\n\n"
-
-# Init variables
-init_vars
-check_vars
-DISTRO="centos"
-
-PKGLIST_MINIMAL="${INTERNAL_REPO_ROOT}/build-tools/build_iso/minimal_rpm_list.txt"
-PKGLIST_STX="${OUTPUT_DIR}/image.inc"
-PKGLIST_DEV="${OUTPUT_DIR}/image-dev.inc"
-PKGLIST_THIS_LAYER="${OUTPUT_DIR}/image-layer.inc"
-PKGLIST_LOWER_LAYER_DIR="${CENTOS_REPO}/layer_image_inc"
-
-PKGLIST_LOWER_LAYER_LIST=""
-if [ -d ${PKGLIST_LOWER_LAYER_DIR} ]; then
-    PKGLIST_LOWER_LAYER_LIST="$(find ${PKGLIST_LOWER_LAYER_DIR} -name '*image.inc')"
-fi
-
-# Create skeleton build dir
-init_output_dir
-
-# Create the vanilla DVD
-echo "Copying vanilla CentOS RPMs"
-install_pkg_list "${PKGLIST_MINIMAL}"
-if [ $? -eq 2 ]; then
-    echo "Error: Failed to install packages from ${PKGLIST_MINIMAL}"
-    exit 1
-fi
-
-# Find all StarlingX packages built locally
-echo "Installing StarlingX packages"
-install_pkg_list "${PKGLIST_STX}"
-if [ $? -eq 2 ]; then
-    echo "Error: Failed to install packages from ${PKGLIST_STX}"
-    exit 1
-fi
-
-
-for PKGLIST_LOWER_LAYER in $PKGLIST_LOWER_LAYER_LIST; do
-    install_pkg_list "${PKGLIST_LOWER_LAYER}"
-    if [ $? -eq 2 ]; then
-        echo "Error: Failed to install packages from ${PKGLIST_LOWER_LAYER}"
-        exit 1
-    fi
-done
-
-if [ "x${RELEASE_BUILD}" == "x" ]; then
-    echo "Installing StarlingX developer packages"
-    install_pkg_list "${PKGLIST_DEV}"
-    if [ $? -eq 2 ]; then
-        echo "Error: Failed to install packages from ${PKGLIST_DEV}"
-        exit 1
-    fi
-
-    for PKGLIST_LOWER_LAYER in $PKGLIST_LOWER_LAYER_LIST; do
-        install_pkg_list "${PKGLIST_LOWER_LAYER}"
-        if [ $? -eq 2 ]; then
-            echo "Error: Failed to install packages from ${PKGLIST_LOWER_LAYER}"
-            exit 1
-        fi
-    done
-fi
-
-\cd $OUTPUT_DIST_DIR
-chmod -R 644 isolinux/Packages/*
-
-# Extract installer files
-extract_installer_files
-
-# Upgrades files
-setup_upgrades_files
-
-# add file signatures to all rpms
-if [ $SIGN_RPM_FILES -ne 0 ]; then
-    sign-rpms -d $OUTPUT_DIST_DIR/isolinux/Packages
-    if [ $? -ne 0 ] ; then
-	echo "failed to add file signatures to RPMs"
-	exit 1
-    fi
-fi
-
-# Finalize and build ISO
-final_touches
-
-# Sign the ISO
-sign_iso
-
-make_report "${PKGLIST_MINIMAL}" "${PKGLIST_STX}" "${PKGLIST_THIS_LAYER}" ${PKGLIST_LOWER_LAYER_LIST}
-
-# Check sanity
-FILESIZE=$(wc -c <"$OUTPUT_FILE")
-if [ $FILESIZE -ge $MINIMUM_EXPECTED_SIZE ]; then
-	printf "Done."
-        printf "Output file: $OUTPUT_FILE\n\n"
-else
-	printf "Output file $OUTPUT_FILE smaller than expected -- probable error\n\n"
-	exit 1
-fi
-
-) 2>&1 | stdbuf -o0 awk '{ print strftime("%H:%M:%S"), $0; fflush(); }' ; exit ${PIPESTATUS[0]}
diff --git a/build-tools/build-pkg-srpm b/build-tools/build-pkg-srpm
deleted file mode 100644
index ae348728..00000000
--- a/build-tools/build-pkg-srpm
+++ /dev/null
@@ -1,63 +0,0 @@
-#!/bin/bash
-
-# Available environment
-# SRC_BASE = absolute path to cgcs-root 
-# AVS_BASE = absolute path to AVS source
-# CGCS_BASE = absolute path to CGCS source
-# RPM_BUILD_BASE = Directory where the package .distro  directory can be found
-# SRPM_OUT = Directory into which SRC RPMS are copied in preparation for mock build
-# RPM_DIR = Directory into which binary RPMs are delivered by mock
-
-SRC_DIR="/sources"
-VERSION=$(grep '^Version:' PKG-INFO | awk -F ': ' '{print $2}' | sed -e 's/^[[:space:]]*//')
-TAR_NAME=$(grep '^Name:' PKG-INFO | awk -F ': ' '{print $2}' | sed -e 's/^[[:space:]]*//')
-CUR_DIR=`pwd`
-BUILD_DIR=".distro/centos7/rpmbuild"
-
-mkdir -p $BUILD_DIR/SRPMS
-
-TAR="$TAR_NAME-$VERSION.tar.gz"
-TAR_PATH="$BUILD_DIR/SOURCES/$TAR"
-
-TAR_NEEDED=0
-if [ -f $TAR_PATH ]; then
-    n=`find . -cnewer $TAR_PATH -and !  -path './.git*' \
-                                -and ! -path './build/*' \
-                                -and ! -path './.pc/*' \
-                                -and ! -path './patches/*' \
-                                -and ! -path './.distro/*' \
-                                -and ! -path './pbr-*.egg/*' \
-                                | wc -l`
-    if [ $n -gt 0 ]; then
-        TAR_NEEDED=1
-    fi
-else
-    TAR_NEEDED=1
-fi
-
-if [ $TAR_NEEDED -gt 0 ]; then
-    tar czvf  $TAR_PATH .$SRC_DIR --exclude '.git*' --exclude 'build' --exclude='.pc' --exclude='patches' --exclude='.distro' --exclude='pbr-*.egg' --transform "s,^\.$SRC_DIR,$TAR_NAME-$VERSION,"
-fi
-
-for SPEC in `ls $BUILD_DIR/SPECS`; do
-    SPEC_PATH="$BUILD_DIR/SPECS/$SPEC"
-    RELEASE=$(grep '^Release:' $SPEC_PATH | awk -F ': ' '{print $2}' | sed -e 's/^[[:space:]]*//')
-    NAME=`echo $SPEC | sed 's/.spec$//'`
-    SRPM="$NAME-$VERSION-$RELEASE.src.rpm"
-    SRPM_PATH="$BUILD_DIR/SRPMS/$SRPM"
-
-    BUILD_NEEDED=0
-    if [ -f $SRPM_PATH ]; then
-        n=`find . -cnewer $SRPM_PATH | wc -l`
-        if [ $n -gt 0 ]; then
-            BUILD_NEEDED=1
-        fi
-    else
-        BUILD_NEEDED=1
-    fi
-
-    if [ $BUILD_NEEDED -gt 0 ]; then
-        rpmbuild -bs $SPEC_PATH --define="%_topdir $CUR_DIR/$BUILD_DIR" --define="_tis_dist .tis"
-    fi
-done
-
diff --git a/build-tools/build-pkgs b/build-tools/build-pkgs
deleted file mode 100755
index da9d65d0..00000000
--- a/build-tools/build-pkgs
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Build first src.rpms, then rpms, from source, or from a downloaded tarball
-# or src.rpm plus our additional patches.
-#
-# This program is a wrapper around build-pkgs-parallel and build-pkgs-serial
-#
-
-BUILD_PKGS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-# Set REPOQUERY and REPOQUERY_SUB_COMMAND for our build environment.
-source "${BUILD_PKGS_DIR}/pkg-manager-utils.sh"
-
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create source and binary rpms:"
-    echo "   build-pkgs [--serial] [args]"
-}
-
-SERIAL_FLAG=0
-RC=0
-
-for arg in "$@"; do
-    case "$1" in
-        --serial) SERIAL_FLAG=1 ;;
-    esac
-done
-
-which mock_tmpfs_umount >> /dev/null
-if [ $? -ne 0 ]; then
-    SERIAL_FLAG=1
-fi
-
-export TMPDIR=$MY_WORKSPACE/tmp
-mkdir -p $TMPDIR
-
-# Old repo path or new?
-LOCAL_REPO=${MY_REPO}/local-repo
-if [ ! -d ${LOCAL_REPO} ]; then
-    LOCAL_REPO=${MY_REPO}/cgcs-tis-repo
-    if [ ! -d ${LOCAL_REPO} ]; then
-        # This one isn't fatal, LOCAL_REPO is not required
-        LOCAL_REPO=${MY_REPO}/local-repo
-    fi
-fi
-
-# Make sure we have a dependency cache
-DEP_CACHE="${LOCAL_REPO}/dependancy-cache"
-
-BUILD_TYPES=(" std rt installer containers")
-DEP_RPM_TYPE=(" RPMS SRPMS ")
-DEP_DELTAS="$DEP_CACHE/deltas-rpms-srpms"
-
-make_cache_current_rpms () {
-
-    FILE=${1}
-
-    if [ -z "${FILE}" ]; then
-        echo "File not specified"
-        return;
-    fi
-
-    if [ -f ${FILE} ]; then
-        rm ${FILE}
-    fi
-
-    for build_type in $BUILD_TYPES; do
-        for rpm_type in $DEP_RPM_TYPE; do
-
-            if  [ -d $MY_WORKSPACE/$build_type/rpmbuild/$rpm_type/repodata ]; then
-                current=$MY_WORKSPACE/$build_type/rpmbuild/$rpm_type/
-
-                ${REPOQUERY} \
-                    --repofrompath=$build_type-$rpm_type,$current \
-                    --repoid=$build_type-$rpm_type --arch=noarch,src,x86_64 \
-                    ${REPOQUERY_SUB_COMMAND} \
-                    --all \
-                    --qf "%-10{repoid} %-40{name} %-10{version} %-10{release}" \
-                    >> ${FILE}
-
-                \rm -rf $TMP_DIR/yum-$USER-*
-            fi
-        done;
-    done;
-}
-
-if [ ! -d $DEP_CACHE ]; then
-    echo "Dependency cache is missing.  Creating it now."
-    $BUILD_PKGS_DIR/create_dependancy_cache.py > $MY_WORKSPACE/create_dependancy_cache.log
-    make_cache_current_rpms $DEP_DELTAS
-    echo "Dependency cache created."
-else
-    DEP_TMP=$(mktemp)
-    make_cache_current_rpms $DEP_TMP
-    if diff $DEP_DELTAS $DEP_TMP > /dev/null; then
-        echo "No changes for stx projects"
-        rm $DEP_TMP
-    else
-        echo "Changes detected for stx projects"
-        echo "Recreating dependecy cache now."
-        mv $DEP_TMP $DEP_DELTAS
-        $BUILD_PKGS_DIR/create_dependancy_cache.py > $MY_WORKSPACE/create_dependancy_cache.log
-        echo "Dependency cache recreated."
-    fi
-fi
-
-if [ $SERIAL_FLAG -eq 1 ]; then
-    echo "build-pkgs-serial $@"
-    build-pkgs-serial "$@"
-    RC=$?
-else
-    echo "build-pkgs-parallel $@"
-    build-pkgs-parallel "$@"
-    RC=$?
-fi
-
-exit $RC
diff --git a/build-tools/build-pkgs-parallel b/build-tools/build-pkgs-parallel
deleted file mode 100755
index bc65b3c8..00000000
--- a/build-tools/build-pkgs-parallel
+++ /dev/null
@@ -1,538 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Build first src.rpms, then rpms, from source, or from a downloaded tarball
-# or src.rpm plus our additional patches.
-#
-# This program is a wrapper around build-srpms-parallel and build-rpms-parallel
-#
-
-BUILD_PKGS_PARALLEL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-source "${BUILD_PKGS_PARALLEL_DIR}/git-utils.sh"
-source "${BUILD_PKGS_PARALLEL_DIR}/spec-utils"
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create source and Binary rpms:"
-    echo "   Build optimizations (--no-descendants, --no-required, --no-build-info,"
-    echo "   --no-autoclean, --no-build-avoidance) are not recommended for the first build"
-    echo "   after a clone/pull, nor the final build prior to creating an iso or patch,"
-    echo "   but can be used for intermediate builds."
-    echo "   i.e. while debugging compilation failures."
-    echo "      build-pkgs-parallel [--layer] [--build-avoidance | --no-build-avoidance] [--no-descendants] [--no-required] [--no-build-info] [--no-autoclean] [--careful] [--formal] [ list of package names ]"
-    echo ""
-    echo "   Test build dependencies of a package:"
-    echo "   Note: A full build of all packages should preceed the dependency test build"
-    echo "      build-pkgs-parallel --dep-test <package_name>"
-    echo ""
-    echo "   Delete source rpms, and the directories associated with it's creation:"
-    echo "   Note: does not clean an edit environment"
-    echo "      build-pkgs-parallel --clean [--build-avoidance | --no-build-avoidance] [ list of package names ]"
-    echo ""
-    echo "   Extract an src.rpm into a pair of git trees to aid in editing it's contents,"
-    echo "   one for source code and one for metadata such as the spec file."
-    echo "   If --no-meta-patch is specified, then WRS patches are omitted."
-    echo "      build-pkgs-parallel --edit [--no-meta-patch] [ list of package names ]"
-    echo ""
-    echo "   Delete an edit environment"
-    echo "      build-pkgs-parallel --edit --clean [ list of package names ]"
-    echo ""
-    echo "   This help page"
-    echo "      build-pkgs-parallel [--help]"
-    echo ""
-}
-
-
-HELP=0
-CLEAN_FLAG=0
-EDIT_FLAG=0
-APPEND_LOG_FLAG=0
-BUILD_AVOIDANCE_FLAG=0
-STD_BUILD=1
-RT_BUILD=1
-INSTALLER_BUILD=0
-CONTAINERS_BUILD=0
-DEP_TEST_FLAG=0
-
-export BUILD_AVOIDANCE_URL=""
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-# read the options
-TEMP=$(getopt -o h --long parallel,rt,std,installer,containers,layer:,edit,build-avoidance,no-build-avoidance,build-avoidance-dir:,build-avoidance-host:,build-avoidance-user:,build-avoidance-day:,no-meta-patch,no-descendants,no-required,no-build-info,no-autoclean,formal,careful,help,clean,dep-test,append-log -n 'build-pkgs-parallel' -- "$@")
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-EXTRA_ARGS_COMMON=""
-EXTRA_ARGS_SRPM=""
-EXTRA_ARGS_RPM=""
-
-export BUILD_AVOIDANCE_OVERRIDE_DIR=""
-export BUILD_AVOIDANCE_OVERRIDE_HOST=""
-export BUILD_AVOIDANCE_OVERRIDE_USR=""
-export BUILD_AVOIDANCE_DAY=""
-
-while true ; do
-    case "$1" in
-        --append-log)     APPEND_LOG_FLAG=1 ; shift ;;
-        --build-avoidance) BUILD_AVOIDANCE_FLAG=1 ; shift ;;
-        --no-build-avoidance) BUILD_AVOIDANCE_FLAG=0 ; shift ;;
-        --build-avoidance-dir)  BUILD_AVOIDANCE_OVERRIDE_DIR=$2; shift 2 ;;
-        --build-avoidance-host)  BUILD_AVOIDANCE_OVERRIDE_HOST=$2; shift 2 ;;
-        --build-avoidance-user)  BUILD_AVOIDANCE_OVERRIDE_USR=$2; shift 2 ;;
-        --build-avoidance-day) BUILD_AVOIDANCE_DAY=$2; shift 2 ;;
-        --no-descendants) EXTRA_ARGS_COMMON+=" --no-descendants" ; shift ;;
-        --formal)         EXTRA_ARGS_COMMON+=" --formal" ; shift ;;
-        --careful)        EXTRA_ARGS_RPM+=" --careful" ; shift ;;
-        --layer)          EXTRA_ARGS_COMMON+=" --layer=$2"; shift 2 ;;
-        --no-required)    EXTRA_ARGS_RPM+=" --no-required" ; shift ;;
-        --no-build-info)  EXTRA_ARGS_COMMON+=" --no-build-info" ; shift ;;
-        --no-autoclean)   EXTRA_ARGS_RPM+=" --no-autoclean" ; shift ;;
-        --no-meta-patch)  EXTRA_ARGS_SRPM+=" --no-meta-patch" ; shift ;;
-        -h|--help)        HELP=1 ; shift ;;
-        --clean)          CLEAN_FLAG=1 ; shift ;;
-        --dep-test)       DEP_TEST_FLAG=1; EXTRA_ARGS_RPM+=" --dep-test"; shift ;;
-        --edit)           EDIT_FLAG=1 ; EXTRA_ARGS_SRPM+=" --edit"; shift ;;
-        --rt)             STD_BUILD=0 ; shift ;;
-        --std)            RT_BUILD=0 ; shift ;;
-        --installer)      INSTALLER_BUILD=1 ; STD_BUILD=0 ; RT_BUILD=0 ; shift ;;
-        --containers)     INSTALLER_BUILD=0 ; STD_BUILD=0 ; RT_BUILD=0 ; CONTAINERS_BUILD=1 ; shift ;;
-        --parallel)       shift ;;
-        --)               shift ; break ;;
-        *)                usage; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-# Don't source until after BUILD_AVOIDANCE_OVERRIDE_* variables are set.
-source "${BUILD_PKGS_PARALLEL_DIR}/build-avoidance-utils.sh"
-
-function my_exit() {
-    build-rpms-parallel --std --tmpfs-clean
-    build-rpms-parallel --rt --tmpfs-clean
-}
-
-function my_sigint() {
-    echo "build-pkgs-parallel sigint"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-
-}
-
-function my_sighup() {
-    echo "build-pkgs-parallel sighup"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-}
-
-function my_sigabrt() {
-    echo "build-pkgs-parallel sigabrt"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-}
-
-function my_sigterm() {
-    echo "build-pkgs-parallel sigterm"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-}
-
-trap my_exit EXIT
-trap my_sigint INT
-trap my_sighup HUP
-trap my_sigabrt ABRT
-trap my_sigterm TERM
-
-# Note: For ease of parsing, a TARGETS list always begins and ends 
-# with a space.  An empty target list consistes of two spaces.
-TARGETS=" $@ "
-EMPTY_TARGETS="  "
-
-TARGETS_STD="$EMPTY_TARGETS"
-TARGETS_RT="$EMPTY_TARGETS"
-TARGETS_INSTALLER="$EMPTY_TARGETS"
-TARGETS_CONTAINERS="$EMPTY_TARGETS"
-TARGETS_MISC="$EMPTY_TARGETS"
-
-find_targets () {
-   local centos_pkg_dirs=$1
-   local d=""
-   local d2=""
-   local g=""
-   local x=""
-   local name=""
-   local path=""
-   local RESULT="$EMPTY_TARGETS"
-   local FOUND=0
-
-   for d in $GIT_LIST; do
-      if [ -f $d/$centos_pkg_dirs ]; then
-         for d2 in $(grep -v '^#' $d/$centos_pkg_dirs); do
-            name=""
-            if [ -f $d/$d2/centos/srpm_path ]; then
-                path=$(cat $d/$d2/centos/srpm_path | head -n 1 | \
-                       sed -e "s#^mirror:CentOS/tis-r3-CentOS/mitaka#${CENTOS_REPO}#" \
-                           -e "s#^mirror:#${CENTOS_REPO}/#" \
-                           -e "s#^repo:#$MY_REPO/#" \
-                           -e "s#^Source/#${CENTOS_REPO}/Source/#")
-                name=$(rpm -q --qf='%{NAME}' --nosignature -p $path)
-            else
-                path=$(find $d/$d2/centos/ -name '*.spec' | head -n 1)
-                if [[ ( -z "$path" ) &&  ( -f $d/$d2/centos/spec_path ) ]]; then
-                    path=$(find $MY_REPO/$(cat $d/$d2/centos/spec_path) -maxdepth 1 -name '*.spec' | head -n 1)
-                fi
-                if [ "$path" != "" ]; then
-                   name=$(spec_find_tag Name "$path" 2>> /dev/null)
-                fi
-            fi
-            if [ "$name" != "" ]; then
-               if [ "$BUILD_TYPE" == "rt" ]; then
-                  FOUND=0
-                  for x in $TARGETS; do
-                     if [ "${x: -3}" == "-rt" ]; then
-                        if [ "${name}" == "$x" ] || [ "${name}-rt" == "${x}" ]; then
-                           RESULT+="$x "
-                           FOUND=1
-                           break
-                        fi
-                     fi
-                  done
-                  if [ $FOUND -eq 0 ]; then
-                     for x in $TARGETS; do
-                        if [ "${name}" == "${x}-rt" ]; then
-                           RESULT+="$x-rt "
-                           FOUND=1
-                           break
-                        else
-                           if [ "${name}" == "$x" ] || [ "${name}-rt" == "${x}" ]; then
-                              RESULT+="$x "
-                              FOUND=1
-                              break
-                           fi
-                        fi
-                     done
-                  fi
-               else
-                  for x in $TARGETS; do
-                     if [ "${name}" == "$x" ]; then
-                         RESULT+="$x "
-                         FOUND=1
-                         break
-                     fi
-                  done
-               fi
-            fi
-         done
-      fi
-   done
-
-   echo "$RESULT"
-   return 0
-}
-
-if [ $EDIT_FLAG -eq 1 ] || [ "$TARGETS" != "$EMPTY_TARGETS" ]; then
-   BUILD_AVOIDANCE_FLAG=0
-fi
-
-echo "BUILD_AVOIDANCE_FLAG=$BUILD_AVOIDANCE_FLAG"
-echo "CLEAN_FLAG=$CLEAN_FLAG"
-echo "EDIT_FLAG=$EDIT_FLAG"
-
-if [ "$TARGETS" != "$EMPTY_TARGETS" ]; then
-   TARGETS_STD="$(find_targets centos_pkg_dirs)"
-
-   BUILD_TYPE_SAVE="$BUILD_TYPE"
-   BUILD_TYPE="rt"
-   TARGETS_RT="$(find_targets centos_pkg_dirs_rt)"
-   BUILD_TYPE="installer"
-   TARGETS_INSTALLER="$(find_targets centos_pkg_dirs_installer)"
-   BUILD_TYPE="containers"
-   TARGETS_CONTAINERS="$(find_targets centos_pkg_dirs_containers)"
-   BUILD_TYPE="$BUILD_TYPE_SAVE"
-
-   echo "TARGETS_STD=$TARGETS_STD"
-   echo "TARGETS_RT=$TARGETS_RT"
-   echo "TARGETS_INSTALLER=$TARGETS_INSTALLER"
-   echo "TARGETS_CONTAINERS=$TARGETS_CONTAINERS"
-
-   for x in $TARGETS; do
-       if [[ $TARGETS_STD == *" $x "* ]]
-       then
-           echo "found $x" >> /dev/null;
-       else
-           if [[ $TARGETS_RT == *" $x "* ]]
-           then
-               echo "found $x" >> /dev/null;
-           else
-               if [[ $TARGETS_INSTALLER == *" $x "* ]]
-               then
-                   echo "found $x" >> /dev/null;
-                   INSTALLER_BUILD=1
-               else
-                   if [[ $TARGETS_CONTAINERS == *" $x "* ]]
-                   then
-                       echo "found $x" >> /dev/null;
-                       CONTAINERS_BUILD=1
-                   else
-                       TARGETS_MISC+="$x "
-                   fi
-               fi
-           fi
-       fi
-   done
-fi
-
-echo "EXTRA_ARGS_COMMON='$EXTRA_ARGS_COMMON'"
-echo "EXTRA_ARGS_SRPM='$EXTRA_ARGS_SRPM'"
-echo "EXTRA_ARGS_RPM='$EXTRA_ARGS_RPM'"
-echo "TARGETS='$TARGETS'"
-echo "TARGETS_STD='$TARGETS_STD'"
-echo "TARGETS_RT='$TARGETS_RT'"
-echo "TARGETS_INSTALLER='$TARGETS_INSTALLER'"
-echo "TARGETS_CONTAINERS='$TARGETS_CONTAINERS'"
-echo "TARGETS_MISC='$TARGETS_MISC'"
-
-if [ $CLEAN_FLAG -eq 1 ]; then
-
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] && [ $BUILD_AVOIDANCE_FLAG -eq 1 ] ; then
-      build_avoidance_clean
-   fi
-
-   if [ $STD_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_STD" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_STD $TARGETS_MISC"
-            ${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_STD $TARGETS_MISC || exit 1
-
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_STD" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_STD $TARGETS_MISC"
-         ${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_STD $TARGETS_MISC || exit 1
-      fi
-   fi
-
-   if [ $RT_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_RT" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_RT $TARGETS_MISC"
-            ${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_RT $TARGETS_MISC || exit 1
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_RT" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_RT $TARGETS_MISC"
-         ${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_RT $TARGETS_MISC || exit 1
-      fi
-   fi
-
-   if [ $INSTALLER_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_INSTALLER" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_INSTALLER"
-            ${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_INSTALLER || exit 1
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_INSTALLER" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_INSTALLER"
-         ${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_INSTALLER || exit 1
-      fi
-   fi
-
-   if [ $CONTAINERS_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_CONTAINERS" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_CONTAINERS"
-            ${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_CONTAINERS || exit 1
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_CONTAINERS" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_CONTAINERS"
-         ${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_CONTAINERS || exit 1
-      fi
-   fi
-
-   exit $?
-fi
-
-function launch_build()
-{
-   local build_type=$1
-   shift
-
-   local logfile=$MY_WORKSPACE/build-$build_type.log
-   local rc
-   local targets
-
-   if [ "$build_type" == "std" ]; then
-      targets="$TARGETS_STD $TARGETS_MISC"
-   else
-      if [ "$build_type" == "rt" ]; then
-         targets="$TARGETS_RT $TARGETS_MISC"
-      else
-         if [ "$build_type" == "installer" ]; then
-            targets="$TARGETS_INSTALLER $TARGETS_MISC"
-         else
-            if [ "$build_type" == "containers" ]; then
-               targets="$TARGETS_CONTAINERS $TARGETS_MISC"
-            else
-               targets="$TARGETS"
-            fi
-         fi
-      fi
-   fi
-
-   echo "Launching $build_type build, logging to $logfile"
-   if [ $APPEND_LOG_FLAG -eq 0 ] && [ -f $logfile ]; then
-       \rm $logfile
-   fi
-
-
-   echo -e "\n######## $(date): Launching build-srpms-parallel --$build_type $EXTRA_ARGS $@\n" | tee --append $logfile
-
-   if [ $BUILD_AVOIDANCE_FLAG -eq 1 ]; then
-      # Build Avoidance requested. Get URL of a usable context, if any.
-      export BUILD_AVOIDANCE_URL=$(get_build_avoidance_context $build_type)
-   fi
-
-   echo "BUILD_AVOIDANCE_URL=$BUILD_AVOIDANCE_URL" | tee --append $logfile
-   if [ "x$BUILD_AVOIDANCE_URL" != "x" ]; then
-      echo "build_avoidance $build_type" | tee --append $logfile
-      build_avoidance $build_type 2>&1 | tee --append $logfile
-   fi
-
-   # No clean flag, call build-srpms-parallel followed by build-rpms-parallel
-   echo "${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $targets" | tee --append $logfile
-   ${BUILD_PKGS_PARALLEL_DIR}/build-srpms-parallel --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $targets 2>&1 | tee --append $logfile
-   rc=${PIPESTATUS[0]}
-   if [ $rc -eq 0 ]; then
-      echo -e "\n######## $(date): build-srpm-parallel --$build_type was successful" | tee --append $logfile
-   else
-      echo -e "\n######## $(date): build-srpm-parallel --$build_type failed with rc=$rc" | tee --append $logfile
-      echo -e "\n$(date): build-srpm-parallel --$build_type failed with rc=$rc"
-      exit $rc
-   fi
-
-   if [ $EDIT_FLAG -ne 1 ]; then
-      echo -e "\n######## $(date): Launching build-rpms-parallel --$build_type $EXTRA_ARGS $@\n" | tee --append $logfile
-      echo "${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $targets" | tee --append $logfile
-      ${BUILD_PKGS_PARALLEL_DIR}/build-rpms-parallel --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $targets 2>&1 | tee --append $logfile
-      rc=${PIPESTATUS[0]}
-      if [ $rc -eq 0 ]; then
-         echo -e "\n######## $(date): build-rpm-parallel --$build_type was successful" | tee --append $logfile
-      else
-         echo -e "\n######## $(date): build-rpm-parallel --$build_type failed with rc=$rc" | tee --append $logfile
-         echo -e "\n$(date): build-rpm-parallel --$build_type failed with rc=$rc"
-         exit $rc
-      fi
-   fi
-
-   echo -e "\n$(date): $build_type complete\n"
-   #exit $rc
-}
-
-function progbar()
-{
-   while :; do
-      for s in / - \\ \|; do
-         printf "\r$s"
-         sleep .5
-      done
-   done
-}
-
-# Create $MY_WORKSPACE if it doesn't exist already
-mkdir -p "${MY_WORKSPACE}"
-if [ $? -ne 0 ]; then
-    echo "Failed to create directory '${MY_WORKSPACE}'"
-    exit 1
-fi
-
-echo "Capture build context"
-git_context > "${MY_WORKSPACE}/CONTEXT"
-
-if [ $STD_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_STD" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build std
-   else
-      echo "Skipping 'std' build, no valid targets in list: '$TARGETS'"
-   fi
-else
-   echo "Skipping 'std' build"
-fi
-
-if [ $RT_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_RT" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build rt
-   else
-      echo "Skipping 'rt' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'rt' build"
-fi
-
-if [ $INSTALLER_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_INSTALLER" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build installer
-   else
-      echo "Skipping 'installer' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'installer' build"
-fi
-
-if [ $CONTAINERS_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_CONTAINERS" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build containers
-   else
-      echo "Skipping 'containers' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'containers' build"
-fi
-
-# Make sure REFERENCE_BUILD is set to something
-if [ -z $REFERENCE_BUILD ]; then
-    REFERENCE_BUILD=0
-fi
-
-if [ $REFERENCE_BUILD -eq 1 ]; then
-    echo "Saving reference context"
-    build_avoidance_save_reference_context
-fi
-
-echo "All builds were successful"
-
-exit 0
-
diff --git a/build-tools/build-pkgs-serial b/build-tools/build-pkgs-serial
deleted file mode 100755
index f148ff28..00000000
--- a/build-tools/build-pkgs-serial
+++ /dev/null
@@ -1,538 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Build first src.rpms, then rpms, from source, or from a downloaded tarball
-# or src.rpm plus our additional patches.
-#
-# This program is a wrapper around build-srpms-serial and build-rpms-serial
-#
-
-BUILD_PKGS_SERIAL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-source "${BUILD_PKGS_SERIAL_DIR}/git-utils.sh"
-source "${BUILD_PKGS_SERIAL_DIR}/spec-utils"
-
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create source and Binary rpms:"
-    echo "   Build optimizations (--no-descendants, --no-required, --no-build-info,"
-    echo "   --no-autoclean, --no-build-avoidance) are not recommended for the first build"
-    echo "   after a clone/pull, nor the final build prior to creating an iso or patch,"
-    echo "   but can be used for intermediate builds."
-    echo "   i.e. while debugging compilation failures."
-    echo "      build-pkgs-serial [--build-avoidance | --no-build-avoidance] [--no-descendants] [--no-required] [--no-build-info] [--no-autoclean] [--careful] [--formal] [ list of package names ]"
-    echo ""
-    echo "   Test build dependencies of a package:"
-    echo "   Note: A full build of all packages should preceed the dependency test build"
-    echo "      build-pkgs-serial --dep-test <package_name>"
-    echo ""
-    echo "   Delete source rpms, and the directories associated with it's creation:"
-    echo "   Note: does not clean an edit environment"
-    echo "      build-pkgs-serial --clean [--build-avoidance | --no-build-avoidance] [ list of package names ]"
-    echo ""
-    echo "   Extract an src.rpm into a pair of git trees to aid in editing it's contents,"
-    echo "   one for source code and one for metadata such as the spec file."
-    echo "   If --no-meta-patch is specified, then WRS patches are omitted."
-    echo "      build-pkgs-serial --edit [--no-meta-patch] [ list of package names ]"
-    echo ""
-    echo "   Delete an edit environment"
-    echo "      build-pkgs-serial --edit --clean [ list of package names ]"
-    echo ""
-    echo "   This help page"
-    echo "      build-pkgs-serial [--help]"
-    echo ""
-}
-
-
-HELP=0
-CLEAN_FLAG=0
-EDIT_FLAG=0
-APPEND_LOG_FLAG=0
-BUILD_AVOIDANCE_FLAG=0
-STD_BUILD=1
-RT_BUILD=1
-INSTALLER_BUILD=0
-CONTAINERS_BUILD=0
-DEP_TEST_FLAG=0
-
-export BUILD_AVOIDANCE_URL=""
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-# read the options
-TEMP=$(getopt -o h --long serial,rt,std,installer,containers,layer:,edit,build-avoidance,no-build-avoidance,build-avoidance-dir:,build-avoidance-host:,build-avoidance-user:,build-avoidance-day:,no-meta-patch,no-descendants,no-required,no-build-info,no-autoclean,formal,careful,help,clean,dep-test,append-log -n 'build-pkgs-serial' -- "$@")
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-EXTRA_ARGS_COMMON=""
-EXTRA_ARGS_SRPM=""
-EXTRA_ARGS_RPM=""
-
-export BUILD_AVOIDANCE_OVERRIDE_DIR=""
-export BUILD_AVOIDANCE_OVERRIDE_HOST=""
-export BUILD_AVOIDANCE_OVERRIDE_USR=""
-export BUILD_AVOIDANCE_DAY=""
-
-while true ; do
-    case "$1" in
-        --append-log)     APPEND_LOG_FLAG=1 ; shift ;;
-        --build-avoidance) BUILD_AVOIDANCE_FLAG=1 ; shift ;;
-        --no-build-avoidance) BUILD_AVOIDANCE_FLAG=0 ; shift ;;
-        --build-avoidance-dir)  BUILD_AVOIDANCE_OVERRIDE_DIR=$2; shift 2 ;;
-        --build-avoidance-host)  BUILD_AVOIDANCE_OVERRIDE_HOST=$2; shift 2 ;;
-        --build-avoidance-user)  BUILD_AVOIDANCE_OVERRIDE_USR=$2; shift 2 ;;
-        --build-avoidance-day) BUILD_AVOIDANCE_DAY=$2; shift 2 ;;
-        --no-descendants) EXTRA_ARGS_COMMON+=" --no-descendants" ; shift ;;
-        --formal)         EXTRA_ARGS_COMMON+=" --formal" ; shift ;;
-        --careful)        EXTRA_ARGS_RPM+=" --careful" ; shift ;;
-        --layer)          EXTRA_ARGS_COMMON+=" --layer=$2"; shift 2 ;;
-        --no-required)    EXTRA_ARGS_RPM+=" --no-required" ; shift ;;
-        --no-build-info)  EXTRA_ARGS_COMMON+=" --no-build-info" ; shift ;;
-        --no-autoclean)   EXTRA_ARGS_RPM+=" --no-autoclean" ; shift ;;
-        --no-meta-patch)  EXTRA_ARGS_SRPM+=" --no-meta-patch" ; shift ;;
-        -h|--help)        HELP=1 ; shift ;;
-        --clean)          CLEAN_FLAG=1 ; shift ;;
-        --dep-test)       DEP_TEST_FLAG=1; EXTRA_ARGS_RPM+=" --dep-test"; shift ;;
-        --edit)           EDIT_FLAG=1 ; EXTRA_ARGS_SRPM+=" --edit"; shift ;;
-        --rt)             STD_BUILD=0 ; shift ;;
-        --std)            RT_BUILD=0 ; shift ;;
-        --installer)      INSTALLER_BUILD=1 ; STD_BUILD=0 ; RT_BUILD=0 ; shift ;;
-        --containers)     INSTALLER_BUILD=0 ; STD_BUILD=0 ; RT_BUILD=0 ; CONTAINERS_BUILD=1;shift ;;
-        --serial)         shift ;;
-        --)               shift ; break ;;
-        *)                usage; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-# Don't source until after BUILD_AVOIDANCE_OVERRIDE_* variables are set.
-source "${BUILD_PKGS_SERIAL_DIR}/build-avoidance-utils.sh"
-
-function my_exit() {
-    build-rpms-parallel --std --tmpfs-clean
-    build-rpms-parallel --rt --tmpfs-clean
-}
-
-function my_sigint() {
-    echo "build-pkgs-parallel sigint"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-
-}
-
-function my_sighup() {
-    echo "build-pkgs-parallel sighup"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-}
-
-function my_sigabrt() {
-    echo "build-pkgs-parallel sigabrt"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-}
-
-function my_sigterm() {
-    echo "build-pkgs-parallel sigterm"
-    pkill -SIGABRT -P $BASHPID &> /dev/null
-    echo "build-pkgs-parallel waiting"
-    wait
-    echo "build-pkgs-parallel wait complete"
-}
-
-trap my_exit EXIT
-trap my_sigint INT
-trap my_sighup HUP
-trap my_sigabrt ABRT
-trap my_sigterm TERM
-
-# Note: For ease of parsing, a TARGETS list always begins and ends 
-# with a space.  An empty target list consistes of two spaces.
-TARGETS=" $@ "
-EMPTY_TARGETS="  "
-
-TARGETS_STD="$EMPTY_TARGETS"
-TARGETS_RT="$EMPTY_TARGETS"
-TARGETS_INSTALLER="$EMPTY_TARGETS"
-TARGETS_CONTAINERS="$EMPTY_TARGETS"
-TARGETS_MISC="$EMPTY_TARGETS"
-
-find_targets () {
-   local centos_pkg_dirs=$1
-   local d=""
-   local d2=""
-   local g=""
-   local x=""
-   local name=""
-   local path=""
-   local RESULT="$EMPTY_TARGETS"
-   local FOUND=0
-
-   for d in $GIT_LIST; do
-      if [ -f $d/$centos_pkg_dirs ]; then
-         for d2 in $(grep -v '^#' $d/$centos_pkg_dirs); do
-            name=""
-            if [ -f $d/$d2/centos/srpm_path ]; then
-               path=$(cat $d/$d2/centos/srpm_path | head -n 1 | \
-                      sed -e "s#^mirror:CentOS/tis-r3-CentOS/mitaka#${CENTOS_REPO}#" \
-                          -e "s#^mirror:#${CENTOS_REPO}/#" \
-                          -e "s#^repo:#$MY_REPO/#" \
-                          -e "s#^Source/#${CENTOS_REPO}/Source/#")
-                name=$(rpm -q --qf='%{NAME}' --nosignature -p $path)
-            else
-                path=$(find $d/$d2/centos/ -name '*.spec' | head -n 1)
-                if [[ ( -z "$path" ) &&  ( -f $d/$d2/centos/spec_path ) ]]; then
-                    path=$(find $MY_REPO/$(cat $d/$d2/centos/spec_path) -maxdepth 1 -name '*.spec' | head -n 1)
-                fi
-                if [ "$path" != "" ]; then
-                   name=$(spec_find_tag Name "$path" 2>> /dev/null)
-                fi
-            fi
-            if [ "$name" != "" ]; then
-               if [ "$BUILD_TYPE" == "rt" ]; then
-                  FOUND=0
-                  for x in $TARGETS; do
-                     if [ "${x: -3}" == "-rt" ]; then
-                        if [ "${name}" == "$x" ] || [ "${name}-rt" == "${x}" ]; then
-                           RESULT+="$x "
-                           FOUND=1
-                           break
-                        fi
-                     fi
-                  done
-                  if [ $FOUND -eq 0 ]; then
-                     for x in $TARGETS; do
-                        if [ "${name}" == "${x}-rt" ]; then
-                           RESULT+="$x-rt "
-                           FOUND=1
-                           break
-                        else
-                           if [ "${name}" == "$x" ] || [ "${name}-rt" == "${x}" ]; then
-                              RESULT+="$x "
-                              FOUND=1
-                              break
-                           fi
-                        fi
-                     done
-                  fi
-               else
-                  for x in $TARGETS; do
-                     if [ "${name}" == "$x" ]; then
-                         RESULT+="$x "
-                         FOUND=1
-                         break
-                     fi
-                  done
-               fi
-            fi
-         done
-      fi
-   done
-
-   echo "$RESULT"
-   return 0
-}
-
-if [ $EDIT_FLAG -eq 1 ] || [ "$TARGETS" != "$EMPTY_TARGETS" ]; then
-   BUILD_AVOIDANCE_FLAG=0
-fi
-
-echo "BUILD_AVOIDANCE_FLAG=$BUILD_AVOIDANCE_FLAG"
-echo "CLEAN_FLAG=$CLEAN_FLAG"
-echo "EDIT_FLAG=$EDIT_FLAG"
-
-if [ "$TARGETS" != "$EMPTY_TARGETS" ]; then
-   TARGETS_STD="$(find_targets centos_pkg_dirs)"
-
-   BUILD_TYPE_SAVE="$BUILD_TYPE"
-   BUILD_TYPE="rt"
-   TARGETS_RT="$(find_targets centos_pkg_dirs_rt)"
-   BUILD_TYPE="installer"
-   TARGETS_INSTALLER="$(find_targets centos_pkg_dirs_installer)"
-   BUILD_TYPE="containers"
-   TARGETS_CONTAINERS="$(find_targets centos_pkg_dirs_containers)"
-   BUILD_TYPE="$BUILD_TYPE_SAVE"
-
-   echo "TARGETS_STD=$TARGETS_STD"
-   echo "TARGETS_RT=$TARGETS_RT"
-   echo "TARGETS_INSTALLER=$TARGETS_INSTALLER"
-   echo "TARGETS_CONTAINERS=$TARGETS_CONTAINERS"
-
-   for x in $TARGETS; do
-       if [[ $TARGETS_STD == *" $x "* ]]
-       then
-           echo "found $x" >> /dev/null;
-       else
-           if [[ $TARGETS_RT == *" $x "* ]]
-           then
-               echo "found $x" >> /dev/null;
-           else
-               if [[ $TARGETS_INSTALLER == *" $x "* ]]
-               then
-                   echo "found $x" >> /dev/null;
-                   INSTALLER_BUILD=1
-               else
-                   if [[ $TARGETS_CONTAINERS == *" $x "* ]]
-                   then
-                       echo "found $x" >> /dev/null;
-                       CONTAINERS_BUILD=1
-                   else
-                       TARGETS_MISC+="$x "
-                   fi
-               fi
-           fi
-       fi
-   done
-fi
-
-echo "EXTRA_ARGS_COMMON='$EXTRA_ARGS_COMMON'"
-echo "EXTRA_ARGS_SRPM='$EXTRA_ARGS_SRPM'"
-echo "EXTRA_ARGS_RPM='$EXTRA_ARGS_RPM'"
-echo "TARGETS='$TARGETS'"
-echo "TARGETS_STD='$TARGETS_STD'"
-echo "TARGETS_RT='$TARGETS_RT'"
-echo "TARGETS_INSTALLER='$TARGETS_INSTALLER'"
-echo "TARGETS_CONTAINERS='$TARGETS_CONTAINERS'"
-echo "TARGETS_MISC='$TARGETS_MISC'"
-
-if [ $CLEAN_FLAG -eq 1 ]; then
-
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] && [ $BUILD_AVOIDANCE_FLAG -eq 1 ] ; then
-      build_avoidance_clean
-   fi
-
-   if [ $STD_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_STD" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_STD $TARGETS_MISC"
-            ${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_STD $TARGETS_MISC || exit 1
-
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_STD" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_STD $TARGETS_MISC"
-         ${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --std --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_STD $TARGETS_MISC || exit 1
-      fi
-   fi
-
-   if [ $RT_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_RT" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_RT $TARGETS_MISC"
-            ${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_RT $TARGETS_MISC || exit 1
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_RT" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_RT $TARGETS_MISC"
-         ${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --rt --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_RT $TARGETS_MISC || exit 1
-      fi
-   fi
-
-   if [ $INSTALLER_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_INSTALLER" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_INSTALLER"
-            ${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_INSTALLER || exit 1
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_INSTALLER" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_INSTALLER"
-         ${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --installer --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_INSTALLER || exit 1
-      fi
-   fi
-
-   if [ $CONTAINERS_BUILD -eq 1 ]; then
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_CONTAINERS" != "$EMPTY_TARGETS" ]; then
-         if [ $EDIT_FLAG -ne 1 ]; then
-            echo "${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_CONTAINERS"
-            ${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $TARGETS_CONTAINERS || exit 1
-         fi
-      fi
-      if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_CONTAINERS" != "$EMPTY_TARGETS" ]; then
-         echo "${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_CONTAINERS"
-         ${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --containers --clean $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $TARGETS_CONTAINERS || exit 1
-      fi
-   fi
-
-   exit $?
-fi
-
-function launch_build()
-{
-   local build_type=$1
-   shift
-
-   local logfile=$MY_WORKSPACE/build-$build_type.log
-   local rc
-   local targets
-
-   if [ "$build_type" == "std" ]; then
-      targets="$TARGETS_STD $TARGETS_MISC"
-   else
-      if [ "$build_type" == "rt" ]; then
-         targets="$TARGETS_RT $TARGETS_MISC"
-      else
-         if [ "$build_type" == "installer" ]; then
-            targets="$TARGETS_INSTALLER $TARGETS_MISC"
-         else
-            if [ "$build_type" == "containers" ]; then
-               targets="$TARGETS_CONTAINERS $TARGETS_MISC"
-            else
-               targets="$TARGETS"
-            fi
-         fi
-      fi
-   fi
-
-   echo "Launching $build_type build, logging to $logfile"
-   if [ $APPEND_LOG_FLAG -eq 0 ] && [ -f $logfile ]; then
-       \rm $logfile
-   fi
-
-   echo -e "\n######## $(date): Launching build-srpms-serial --$build_type $EXTRA_ARGS $@\n" | tee --append $logfile
-
-   if [ $BUILD_AVOIDANCE_FLAG -eq 1 ]; then
-      # Build Avoidance requested. Get URL of a usable context, if any.
-      export BUILD_AVOIDANCE_URL=$(get_build_avoidance_context $build_type)
-   fi
-
-   echo "BUILD_AVOIDANCE_URL=$BUILD_AVOIDANCE_URL" | tee --append $logfile
-   if [ "x$BUILD_AVOIDANCE_URL" != "x" ]; then
-      echo "build_avoidance $build_type" | tee --append $logfile
-      build_avoidance $build_type 2>&1 | tee --append $logfile
-   fi
-
-   # No clean flag, call build-srpms-serial followed by build-rpms-serial
-   echo "${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $targets" | tee --append $logfile
-   ${BUILD_PKGS_SERIAL_DIR}/build-srpms-serial --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_SRPM $targets 2>&1 | tee --append $logfile
-   rc=${PIPESTATUS[0]}
-   if [ $rc -eq 0 ]; then
-      echo -e "\n######## $(date): build-srpm-serial --$build_type was successful" | tee --append $logfile
-   else
-      echo -e "\n######## $(date): build-srpm-serial --$build_type failed with rc=$rc" | tee --append $logfile
-      echo -e "\n$(date): build-srpm-serial --$build_type failed with rc=$rc"
-      exit $rc
-   fi
-
-   if [ $EDIT_FLAG -ne 1 ]; then
-      echo -e "\n######## $(date): Launching build-rpms-serial --$build_type $EXTRA_ARGS $@\n" | tee --append $logfile
-      echo "${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $targets" | tee --append $logfile
-      ${BUILD_PKGS_SERIAL_DIR}/build-rpms-serial --$build_type $EXTRA_ARGS_COMMON $EXTRA_ARGS_RPM $targets 2>&1 | tee --append $logfile
-      rc=${PIPESTATUS[0]}
-      if [ $rc -eq 0 ]; then
-         echo -e "\n######## $(date): build-rpm-serial --$build_type was successful" | tee --append $logfile
-      else
-         echo -e "\n######## $(date): build-rpm-serial --$build_type failed with rc=$rc" | tee --append $logfile
-         echo -e "\n$(date): build-rpm-serial --$build_type failed with rc=$rc"
-         exit $rc
-      fi
-   fi
-
-   echo -e "\n$(date): $build_type complete\n"
-   #exit $rc
-}
-
-function progbar()
-{
-   while :; do
-      for s in / - \\ \|; do
-         printf "\r$s"
-         sleep .5
-      done
-   done
-}
-
-# Create $MY_WORKSPACE if it doesn't exist already
-mkdir -p "${MY_WORKSPACE}"
-if [ $? -ne 0 ]; then
-    echo "Failed to create directory '${MY_WORKSPACE}'"
-    exit 1
-fi
-
-echo "Capture build context"
-git_context > "${MY_WORKSPACE}/CONTEXT"
-
-if [ $STD_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_STD" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build std
-   else
-      echo "Skipping 'std' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'std' build"
-fi
-
-if [ $RT_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_RT" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build rt
-   else
-      echo "Skipping 'rt' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'rt' build"
-fi
-
-if [ $INSTALLER_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_INSTALLER" != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build installer
-   else
-      echo "Skipping 'installer' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'installer' build"
-fi
-
-if [ $CONTAINERS_BUILD -eq 1 ]; then
-   if [ "$TARGETS" == "$EMPTY_TARGETS" ] || [ "$TARGETS_CONTAINERS " != "$EMPTY_TARGETS" ] || [ "$TARGETS_MISC" != "$EMPTY_TARGETS" ]; then
-      launch_build containers
-   else
-      echo "Skipping 'containers' build, no valid targets in list: $TARGETS"
-   fi
-else
-   echo "Skipping 'containers' build"
-fi
-
-# Make sure REFERENCE_BUILD is set to something
-if [ -z $REFERENCE_BUILD ]; then
-    REFERENCE_BUILD=0
-fi
-
-if [ $REFERENCE_BUILD -eq 1 ]; then
-    echo "Saving reference context"
-    build_avoidance_save_reference_context
-fi
-
-echo "All builds were successful"
-
-exit 0
-
diff --git a/build-tools/build-rpms b/build-tools/build-rpms
deleted file mode 100755
index 1867743e..00000000
--- a/build-tools/build-rpms
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Builds rpm files from src.rpm files.
-#
-# This program is a wrapper around build-rpms-parallel and build-rpms-serial
-#
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create binary rpms:"
-    echo "   build-rpms [--serial] [args]"
-}
-
-SERIAL_FLAG=0
-
-for arg in "$@"; do
-    case "$1" in
-        --serial) SERIAL_FLAG=1 ;;
-    esac
-done
-
-which mock_tmpfs_umount >> /dev/null
-if [ $? -ne 0 ]; then
-    SERIAL_FLAG=1
-fi
-
-if [ $SERIAL_FLAG -eq 1 ]; then
-    echo "build-rpms-serial $@"
-    build-rpms-serial "$@"
-else
-    echo "build-rpms-parallel $@"
-    build-rpms-parallel "$@"
-fi
-
diff --git a/build-tools/build-rpms-parallel b/build-tools/build-rpms-parallel
deleted file mode 100755
index d0d729bd..00000000
--- a/build-tools/build-rpms-parallel
+++ /dev/null
@@ -1,2507 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Builds rpm files from src.rpm files.
-#
-# This version compiles packages in parrallel if sufficient resources
-# (cpu's and memory) are available.
-#
-# The location of packages to be built is
-# $MY_WORKSPACE/<build-type>/rpmbuild/SRPMS.
-#
-# The build order is a derived from the BuildRequires in the
-# spec files in the src.rpms.  Note that the BuildRequires sometimes
-# create dependency loops, so no correct order can be computed.  In these
-# cases we add a retry loop.  As long as one new package builds, we
-# keep retrying the loop, until all are built, or no progress is made.
-# So please don't panic and CTRL-C just because you see a few error
-# messages go by!
-#
-
-export ME=$(basename "$0")
-CMDLINE="$ME $@"
-BUILD_RPMS_PARALLEL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-# Set PKG_MANAGER for our build environment.
-source "${BUILD_RPMS_PARALLEL_DIR}/pkg-manager-utils.sh"
-
-
-# Build for distribution.  Currently 'centos' is only supported value.
-export DISTRO="centos"
-
-# Maximum number of parallel build environments
-ABSOLUTE_MAX_WORKERS=4
-
-# Maximum space in gb for each tmpfs based parallel build environment.
-#  Note: currently 11 gb is sufficient to build everything except ceph
-MAX_MEM_PER_WORKER=11
-
-# Minimum space in gb for each tmpfs based parallel build environment
-#  Note: tmpfs is typically 2.5 gb when compiling many small jobs
-MIN_MEM_PER_WORKER=3
-
-# Maximum number of disk based parallel build environments
-MAX_DISK_BASED_WORKERS=2
-
-# Minimum space in gb for each disk based parallel build environment
-MIN_DISK_PER_WORKER=20
-
-# How many srpms to build before we add another parallel build environment
-MIN_TASKS_PER_CORE=3
-
-# Max number of new concurrent builds to allow for
-MAX_SHARE_FACTOR=4
-
-# Always leave at least MEMORY_RESERVE gb of available mem for the system
-MEMORY_RESERVE=1
-
-# These two values will be reassigned in the 'compute_resources' subroutine
-MOCKCHAIN_RESOURCE_ALLOCATION=""
-MAX_WORKERS=$ABSOLUTE_MAX_WORKERS
-
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-   CREATEREPO="createrepo"
-fi
-
-# Old repo path or new?
-LOCAL_REPO=${MY_REPO}/local-repo
-if [ ! -d ${LOCAL_REPO} ]; then
-    LOCAL_REPO=${MY_REPO}/cgcs-tis-repo
-    if [ ! -d ${LOCAL_REPO} ]; then
-        # This one isn't fatal, LOCAL_REPO is not required
-        LOCAL_REPO=${MY_REPO}/local-repo
-    fi
-fi
-
-# Make sure we have a dependency cache
-DEPENDANCY_DIR="${LOCAL_REPO}/dependancy-cache"
-SRPM_DIRECT_REQUIRES_FILE="$DEPENDANCY_DIR/SRPM-direct-requires"
-SRPM_TRANSITIVE_REQUIRES_FILE="$DEPENDANCY_DIR/SRPM-transitive-requires"
-SRPM_TRANSITIVE_DESCENDANTS_FILE="$DEPENDANCY_DIR/SRPM-transitive-descendants"
-SRPM_DIRECT_DESCENDANTS_FILE="$DEPENDANCY_DIR/SRPM-direct-descendants"
-SRPM_RPM_DIRECT_REQUIRES_FILE="$DEPENDANCY_DIR/SRPM-direct-requires-rpm"
-RPM_DIRECT_REQUIRES_FILE="$DEPENDANCY_DIR/RPM-direct-requires"
-RPM_TO_SRPM_MAP_FILE="$DEPENDANCY_DIR/rpm-to-srpm"
-SRPM_TO_RPM_MAP_FILE="$DEPENDANCY_DIR/srpm-to-rpm"
-
-UNBUILT_PATTERN_FILE="$MY_REPO/build-data/unbuilt_rpm_patterns"
-
-SIGN_SECURE_BOOT="sign-secure-boot"
-SIGN_SECURE_BOOT_LOG="sign-secure-boot.log"
-
-export MOCK=/usr/bin/mock
-
-BUILD_RPMS_PARALLEL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-source "${BUILD_RPMS_PARALLEL_DIR}/image-utils.sh"
-source "${BUILD_RPMS_PARALLEL_DIR}/wheel-utils.sh"
-source "${BUILD_RPMS_PARALLEL_DIR}/spec-utils"
-source "${BUILD_RPMS_PARALLEL_DIR}/srpm-utils"
-
-HOME=$(pwd)
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   $ME [ [--rt] [--no-required] [--no-descendants] [--no-build-info] [--no-autoclean] [--formal] <optional list of package names> ]"
-    echo "   $ME --dep-test <package name>"
-    echo "   $ME --clean [ [--no-descendants] <optional list of package names> ]"
-    echo "   $ME --help"
-    echo ""
-}
-
-
-number_of_users () {
-    users | tr ' ' '\n' | sort --uniq | wc -l
-}
-
-total_mem_gb () {
-    free -g | grep 'Mem:' | awk '{ print $2 }'
-}
-
-available_mem_gb () {
-    free -g | grep 'Mem:' | awk '{ print $7 }'
-}
-
-available_disk_gb () {
-    df -BG $MY_WORKSPACE | grep -v '^Filesystem' | awk '{ print $4 }' | sed 's#G$##'
-}
-
-number_of_cpus () {
-    /usr/bin/nproc
-}
-
-number_of_builds_in_progress () {
-    local x
-    x=$(ps -ef | grep build-pkgs-parallel | wc -l)
-    x=$((x-1))
-    echo $x
-}
-
-sqrt () {
-    echo -e "sqrt($1)" | bc -q -i | head -2 | tail -1
-}
-
-join_by () { local IFS="$1"; shift; echo "$*"; }
-
-create-no-clean-list () {
-   local MY_YUM_CONF=$(create-yum-conf)
-   local NO_CLEAN_LIST_FILE=$MY_WORKSPACE/no_clean_list.txt
-   local NEED_REBUILD=0
-
-   if [ ! -f $NO_CLEAN_LIST_FILE ]; then
-       NEED_REBUILD=1
-   else
-       if [ -f $MY_BUILD_CFG ]; then
-           find "$MY_BUILD_CFG" -not -newermm "$NO_CLEAN_LIST_FILE" | grep -q $(basename $MY_BUILD_CFG)
-           if [ $? -eq 0 ]; then
-               NEED_REBUILD=1
-           fi
-       fi
-   fi
-
-   if [ $NEED_REBUILD -eq 1 ]; then
-       local install_groups=""
-       local install_packages=""
-       local p
-
-       for p in $(grep "config_opts\['chroot_setup_cmd'\]" $MY_BUILD_CFG | tail -n1 | cut -d '=' -f 2 | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//' -e "s/^'//" -e "s/'$//" -e 's/^install //'); do
-          if [[ $p == @* ]] ; then
-              install_groups=$(join_by ' ' $install_groups $(echo $p | cut -c 2-))
-          else
-              install_packages=$(join_by ' ' $install_packages $p)
-          fi
-       done
-
-       local noclean_last_list_len=0
-       local noclean_list=""
-       local tmp_list=""
-       local g
-
-       for g in $install_groups; do
-           # Find mandatory packages in the group.
-           # Discard anything before (and including) 'Mandatory Packages:'
-           # and anything after (and including) 'Optional Packages:'.
-           # Also discard leading spaces or '+' characters.
-           tmp_list=$(${PKG_MANAGER} -c $MY_YUM_CONF groupinfo $g 2>> /dev/null \
-                        | awk 'f;/Mandatory Packages:/{f=1}' \
-                        | sed -n '/Optional Packages:/q;p' \
-                        | sed 's#[ +]*##')
-           noclean_list=$(join_by ' ' $noclean_list $tmp_list)
-       done
-
-       noclean_list=$(join_by ' ' $noclean_list $install_packages)
-       noclean_list=$(echo $noclean_list | tr ' ' '\n' | sort --uniq)
-       noclean_list_len=$(echo $noclean_list | wc -w)
-
-       while [ $noclean_list_len -gt $noclean_last_list_len ]; do
-           noclean_last_list_len=$noclean_list_len
-           noclean_list=$( (${PKG_MANAGER} -c $MY_YUM_CONF deplist $noclean_list 2>> /dev/null | grep provider: | awk '{ print $2 }' | awk -F . '{ print $1 }'; for p in $noclean_list; do echo $p; done) | sort --uniq)
-           noclean_list_len=$(echo $noclean_list | wc -w)
-       done
-
-       echo $noclean_list > $NO_CLEAN_LIST_FILE
-   fi
-
-   cat $NO_CLEAN_LIST_FILE
-}
-
-str_lst_contains() {
-   TARGET="$1"
-   LST="$2"
-   if [[ $LST =~ (^|[[:space:]])$TARGET($|[[:space:]]) ]] ; then
-      return 0
-   else
-      return 1
-   fi
-}
-
-compute_resources () {
-    local weight=0
-    local b
-
-    echo ""
-    for f in $@; do
-        b=$(basename $f)
-        if [ -f $SOURCES_DIR/$b/BIG ] || [ ]; then
-            weight=$((weight+MIN_TASKS_PER_CORE))
-        else
-            weight=$((weight+1))
-        fi
-    done
-    weight=$((weight/MIN_TASKS_PER_CORE))
-
-    # gather data about the build machines resources
-    local users=$(number_of_users)
-    if [ $users -lt 1 ]; then users=1; fi
-    local mem=$(available_mem_gb)
-    local total_mem=$(total_mem_gb)
-    local disk=$(available_disk_gb)
-    local cpus=$(number_of_cpus)
-    local num_users=$(sqrt $users)
-    local num_build=$(number_of_builds_in_progress)
-    num_build=$((num_build+1))
-    echo "compute_resources: total: cpus=$cpus, total_mem=$total_mem, avail_mem=$mem, disk=$disk, weight=$weight, num_build=$num_build"
-
-    # What fraction of the machine will we use
-    local share_factor=$num_users
-    if [ $share_factor -gt $((MAX_SHARE_FACTOR+num_build-1)) ]; then share_factor=$((MAX_SHARE_FACTOR+num_build-1)); fi
-    if [ $share_factor -lt $num_build ]; then share_factor=$num_build; fi
-
-    # What fraction of free memory can we use.  
-    #  e.g.
-    #  We intend to support 4 concurrent builds (share_factor)
-    #  Two builds (excluding ours) are already underway (num_build-1)
-    #  So we should be able to support 2 more builds (mem_share_factor)
-    local mem_share_factor=$((share_factor-(num_build-1)))
-    if [ $mem_share_factor -lt 1 ]; then mem_share_factor=1; fi
-
-    echo "compute_resources: share_factor=$share_factor mem_share_factor=$mem_share_factor"
-
-    # What resources are we permitted to use
-    #   Continuing the example from above ... memory share is the lesser of
-    #   - Half the available memory     (mem/mem_share_factor)
-    #   - A quarter of the total memory (total_mem/share_factor)
-    local mem_share=$(((mem-MEMORY_RESERVE)/mem_share_factor))
-    if [ $mem_share -lt 0 ]; then mem_share=0; fi
-    local total_mem_share=$(((total_mem-MEMORY_RESERVE)/share_factor))
-    if [ $total_mem_share -lt 0 ]; then total_mem_share=0; fi
-    if [ $mem_share -gt $total_mem_share ]; then mem_share=$total_mem_share; fi
-    local disk_share=$((disk/share_factor))
-    local cpus_share=$((cpus/share_factor))
-
-    echo "compute_resources: our share: cpus=$cpus_share, mem=$mem_share, disk=$disk_share"
-
-    # How many build jobs, how many jobs will use tmpfs, and how much mem for each tmpfs
-    local workers=$cpus_share
-    if [ $workers -gt $MAX_WORKERS ]; then workers=$MAX_WORKERS; fi
-    if [ $workers -gt $weight ]; then workers=$weight; fi
-    if [ $workers -lt 1 ]; then workers=1; fi
-    local max_mem_based_workers=$((mem_share/MIN_MEM_PER_WORKER))
-    if [ $max_mem_based_workers -lt 0 ]; then max_mem_based_workers=0; fi
-    local max_disk_based_workers=$((disk_share/MIN_DISK_PER_WORKER))
-    if [ $max_disk_based_workers -gt $MAX_DISK_BASED_WORKERS ]; then max_disk_based_workers=$MAX_DISK_BASED_WORKERS; fi
-    if [ $max_disk_based_workers -lt 1 ]; then max_disk_based_workers=1; fi
-    echo "max_disk_based_workers=$max_disk_based_workers, max_mem_based_workers=$max_mem_based_workers"
-    local mem_based_workers=$max_mem_based_workers
-    if [ $mem_based_workers -ge $workers ]; then mem_based_workers=$((workers-1)); fi
-    local disk_based_workers=$((workers-mem_based_workers))
-    if [ $disk_based_workers -gt $max_disk_based_workers ]; then disk_based_workers=$max_disk_based_workers; fi
-    if [ $disk_based_workers -lt 1 ]; then disk_based_workers=1; fi
-    echo "disk_based_workers=$disk_based_workers, mem_based_workers=$mem_based_workers"
-    if [ $workers -gt $((disk_based_workers+mem_based_workers)) ]; then workers=$((disk_based_workers+mem_based_workers)); fi
-    local mem_spoken_for=$((mem_based_workers*MIN_MEM_PER_WORKER))
-    local avail_mem=$((mem_share-mem_spoken_for))
-    local x=""
-    for i in $(seq 0 $((workers-1))); do
-        if [ $i -lt $disk_based_workers ]; then
-            x="$x:0"
-        else
-            extra_mem=$(($MAX_MEM_PER_WORKER-$MIN_MEM_PER_WORKER))
-            if [ $extra_mem -gt $avail_mem ]; then extra_mem=$avail_mem; fi
-            avail_mem=$((avail_mem-extra_mem))
-            mem_for_worker=$((MIN_MEM_PER_WORKER+extra_mem))
-            x="$x:$mem_for_worker"
-        fi
-    done
-
-    # Our output is saved in environment variables
-    MOCKCHAIN_RESOURCE_ALLOCATION=$(echo $x | sed 's#^:##')
-    MAX_WORKERS=$workers
-    echo "compute_resources: MAX_WORKERS=$MAX_WORKERS, MOCKCHAIN_RESOURCE_ALLOCATION=$MOCKCHAIN_RESOURCE_ALLOCATION"
-    echo ""
-}
-
-
-#
-# Create a list of rpms in the directory
-#
-create_lst () {
-   local DIR=${1}
-
-       (cd $DIR
-        [ -f rpm.lst ] && \rm -rf rpm.lst
-        [ -f srpm.lst ] && \rm -rf srpm.lst
-        find . -name '*.rpm' -and -not -name '*.src.rpm' | sed 's#^[.][/]##' | sort > rpm.lst
-        find . -name '*.src.rpm' | sed 's#^[.][/]##' | sort > srpm.lst
-       )
-}
-
-#
-# Delete old repodata and reate a new one
-#
-recreate_repodata () {
-   local DIR=${1}
-
-       (
-        mkdir -p $DIR
-        cd $DIR
-        if [ -f repodata/*comps*xml ]; then
-           \mv repodata/*comps*xml comps.xml
-        fi
-        \rm -rf repodata
-        \rm -rf .repodata
-        if [ -f comps.xml ]; then
-           $CREATEREPO -g comps.xml --workers $(number_of_cpus) $(pwd)
-        else
-           $CREATEREPO --workers $(number_of_cpus) $(pwd)
-        fi
-       )
-}
-
-#
-# Update existing repodata
-#
-update_repodata () {
-   local DIR=${1}
-
-       (cd $DIR
-        TMP=$(mktemp /tmp/update_repodata_XXXXXX)
-        RC=0
-        if [ -f comps.xml ]; then
-           $CREATEREPO --update -g comps.xml --workers $(number_of_cpus) $(pwd) &> $TMP
-           RC=$?
-        else
-           $CREATEREPO --update --workers $(number_of_cpus) $(pwd) &> $TMP
-           RC=$?
-        fi
-        if [ $RC -ne 0 ]; then
-           cat $TMP
-        fi
-        \rm -f $TMP
-       )
-}
-
-#
-# return array that is the intersection of two other arrays
-#
-# NEW_ARRAY=( $( intersection ARRAY1 ARRAY2 ) )
-#
-intersection () {
-   local Aname=$1[@]
-   local Bname=$2[@]
-   local A=("${!Aname}")
-   local B=("${!Bname}")
-
-   # echo "${A[@]}"
-   # echo "${B[@]}"
-   for a in "${A[@]}"; do
-      # echo "a=$a"
-      for b in "${B[@]}"; do
-         # echo "b=$b"
-         if [ "$a" == "$b" ]; then
-            echo "$a"
-            break
-         fi
-      done
-   done
-}
-
-#
-# return array that is the union of two other arrays
-#
-# NEW_ARRAY=( $( union ARRAY1 ARRAY2 ) )
-#
-union () {
-   local Aname=$1[@]
-   local Bname=$2[@]
-   local A=("${!Aname}")
-   local B=("${!Bname}")
-   local a
-   local b
-
-   for a in "${A[@]}"; do
-      echo "$a"
-   done
-
-   for b in "${B[@]}"; do
-      local found=0
-      for a in "${A[@]}"; do
-         if [ "$a" == "$b" ]; then
-            found=1
-            break
-         fi
-      done
-      if [ $found -eq 0 ]; then
-         echo $b
-      fi
-   done
-}
-
-#
-# returns 0 if element is in the array
-#
-#  e.g.  contains ARRAY $SEEKING  && echo "$SEEKING is in 'ARRAY'"
-#
-contains () {
-   local Aname=$1[@]
-   local A=("${!Aname}")
-   local seeking=$2
-   local in=1
-
-    for a in "${A[@]}"; do
-        if [[ $a == $seeking ]]; then
-            in=0
-            break
-        fi
-    done
-    return $in
-}
-
-#
-# Append element to array if not present
-#
-# ARRAY=( $( put ARRAY $ELEMENT ) )
-#
-put () {
-   local Aname=$1[@]
-   local A=("${!Aname}")
-   local element="$2"
-   for a in "${A[@]}"; do
-      echo "$a"
-   done
-   contains A "$element" || echo "$element"
-}
-
-build_order_recursive () {
-   local target=$1
-   local idx
-   local remainder_list
-   local needs
-   local needs_list
-
-   for((idx=0;idx<${#UNORDERED_LIST[@]};idx++)); do
-      if [ ${UNORDERED_LIST[idx]} == $target ]; then
-         remainder_list=( ${UNORDERED_LIST[@]:0:$idx} ${UNORDERED_LIST[@]:$((idx + 1))} )
-         UNORDERED_LIST=( ${remainder_list[@]} )
-         needs=( $(grep "^$target;" "$SRPM_DIRECT_REQUIRES_FILE" | sed "s/$target;//" | sed 's/,/ /g') )
-         needs_list=( $(intersection needs remainder_list) )
-         for((idx=0;idx<${#needs_list[@]};idx++)); do
-             build_order_recursive ${needs_list[idx]}
-         done
-         echo $target
-         break
-      fi
-   done
-}
-
-build_order () {
-   local Aname=$1[@]
-   local original_list=("${!Aname}")
-   local needs
-   local needs_list
-   local remainder_list
-   local idx
-   local element
-   local next_start=0
-   local old_next_start=0
-   local progress=1
-
-   while [ ${#original_list[@]} -gt 0 ] && [ $progress -gt 0 ]; do
-      progress=0
-      old_next_start=$next_start
-      for((idx=$next_start;idx<${#original_list[@]};idx++)); do
-         element=${original_list[idx]}
-         next_start=$idx
-         remainder_list=( ${original_list[@]:0:$idx} ${original_list[@]:$((idx + 1))} )
-         needs=( $(grep "^$element;" "$SRPM_DIRECT_REQUIRES_FILE" | sed "s/$element;//" | sed 's/,/ /g') )
-         needs_list=( $(intersection needs remainder_list) )
-         if [ ${#needs_list[@]} -eq 0 ]; then
-            echo "$element"
-            original_list=( "${remainder_list[@]}" )
-            if [ $next_start -ge ${#original_list[@]} ]; then
-               next_start=0
-            fi
-            progress=1
-            break
-         fi
-      done
-      if [ $old_next_start -ne 0 ]; then
-         progress=1
-         next_start=0
-      fi
-   done
-
-   if [ ${#original_list[@]} -gt 0 ]; then
-      # Had trouble calculating a build order for these remaining packages, so stick them at the end
-      UNORDERED_LIST=( ${original_list[@]} )
-      while [ ${#UNORDERED_LIST[@]} -gt 0 ]; do
-         element=${UNORDERED_LIST[0]}
-         build_order_recursive $element
-      done
-   fi
-}
-
-set_mock_symlinks () {
-   local LNK
-   local DEST
-   local CFG=$1
-   if [ -d /localdisk/loadbuild/mock ]; then
-      mkdir -p $MY_WORKSPACE
-      LNK=$(echo "/localdisk/loadbuild/mock/$(basename $CFG)" | sed 's/.cfg$//')
-      if [ ! -L $LNK ] && [ -d $LNK ]; then
-         echo "WARNING: Found directory at '$LNK' when symlink was expected. Fixing..."
-         \rm -rf $LNK
-         if [ -d $LNK ]; then
-            \mv $LNK $LNK.clean_me
-         fi
-      fi
-      if [ -L $LNK ]; then
-         DEST=$(readlink $LNK)
-         if [ "$DEST" != "$MY_WORKSPACE" ] || [ ! -d "$MY_WORKSPACE" ]; then
-            echo "WARNING: Found broken symlink at '$LNK'. Fixing..."
-            \rm -f $LNK
-         fi
-      fi
-      if [ ! -L $LNK ]; then
-         if [ ! -d "$MY_WORKSPACE" ]; then
-            echo "ERROR: Can't create symlink from $LNK to $MY_WORKSPACE as destination does not exist."
-            exit 1
-         fi
-         ln -s $MY_WORKSPACE $LNK
-      fi
-   fi
-
-   if [ -d /localdisk/loadbuild/mock-cache ]; then
-      mkdir -p $MY_WORKSPACE/cache
-      LNK=$(echo "/localdisk/loadbuild/mock-cache/$(basename $CFG)" | sed 's/.cfg$//')
-      if [ ! -L $LNK ] && [ -d $LNK ]; then
-         echo "WARNING: Found directory at '$LNK' when symlink was expected. Fixing..."
-         \rm -rf $LNK
-         if [ -d $LNK ]; then
-            \mv $LNK $LNK.clean_me
-         fi
-      fi
-      if [ -L $LNK ]; then
-         DEST=$(readlink $LNK)
-         if [ "$DEST" != "$MY_WORKSPACE/cache" ] || [ ! -d "$MY_WORKSPACE/cache" ]; then
-            echo "WARNING: Found broken symlink at '$LNK'. Fixing..."
-            \rm -f $LNK
-         fi
-      fi
-      if [ ! -L $LNK ]; then
-         if [ ! -d "$MY_WORKSPACE/cache" ]; then
-            echo "ERROR: Can't create symlink from $LNK to $MY_WORKSPACE/cache as destination does not exist."
-            exit 1
-         fi
-         ln -s $MY_WORKSPACE/cache $LNK
-      fi
-   fi
-}
-
-remove_mock_symlinks () {
-   local LNK
-   local CFG=$1
-   if [ -d /localdisk/loadbuild/mock ]; then
-      LNK=$(echo "/localdisk/loadbuild/mock/$(basename $CFG)" | sed 's/.cfg$//')
-      if [ -L $LNK ]; then
-         \rm -f $LNK
-      fi
-      if [ -d $LNK ]; then
-         \rm -rf $LNK
-         if [ $? -ne 0 ]; then
-            \mv -f $LNK $LNK.clean_me
-         fi
-      fi
-   fi
-
-   if [ -d /localdisk/loadbuild/mock-cache ]; then
-      LNK=$(echo "/localdisk/loadbuild/mock-cache/$(basename $MY_BUILD_CFG)" | sed 's/.cfg$//')
-      if [ -L $LNK ]; then
-         \rm -f $MY_WORKSPACE/cache $LNK
-      fi
-      if [ -d $LNK ]; then
-         \rm -rf $LNK
-         if [ $? -ne 0 ]; then
-            \mv -f $LNK $LNK.clean_me
-         fi
-      fi
-   fi
-}
-
-umount_mock_root_as_tmpfs_all () {
-    for i in $(seq 0 $((ABSOLUTE_MAX_WORKERS-1))); do
-        umount_mock_root_as_tmpfs $i
-    done
-}
-
-umount_mock_root_as_tmpfs_cfg () {
-    local CFG=$1
-    local build_idx=$(basename $CFG | sed 's#.*[.]b\([0-9]*\)[.]cfg#\1#')
-    if [ "$build_idx" != "" ]; then
-        umount_mock_root_as_tmpfs $build_idx
-    else
-        echo "umount_mock_root_as_tmpfs_cfg: Failed to map '$CFG' to a build_idx"
-    fi
-}
-
-umount_mock_root_as_tmpfs () {
-   local build_idx=$1
-   local mount_dir=$(readlink -f $MY_WORKSPACE/mock)/b${build_idx}/root
-   local rc
-
-   mount | grep tmpfs | grep $mount_dir &> /dev/null
-   if [ $? -ne 0 ]; then
-      return 0
-   fi
-   mock_tmpfs_umount $mount_dir &> /dev/null
-
-   rc=$?
-   if [ $rc -ne 0 ]; then
-      echo "FAILED: mock_tmpfs_umount $mount_dir"
-   fi
-   return $rc
-}
-
-kill_descendents ()
-{
-    local kill_pid=$1
-    local kill_all=$2
-    local need_stop=$3
-    local iteration=$4
-    local ret=0
-    local rc=0
-
-    # echo "kill_descendents pid=$kill_pid, all=$kill_all stop=$need_stop, iteration=$iteration"
-
-    local relevant_recursive_children="$ME"
-    local relevant_recursive_promote_children="mock"
-    local relevant_other_children="mockchain-parallel mockchain-parallel-1.3.4 mockchain-parallel-1.4.16 mockchain-parallel-2.6 mockchain-parallel-2.7"
-
-    local recursive_promote_children=$(for relevant_child in $relevant_recursive_promote_children; do pgrep -P $kill_pid $relevant_child; done)
-    local recursive_children=$(for relevant_child in $relevant_recursive_children; do pgrep -P $kill_pid $relevant_child; done)
-    local other_children=""
-
-    if [ $kill_all -eq 1 ]; then
-        recursive_promote_children=""
-        recursive_children=$(pgrep -P $kill_pid)
-    fi
-
-    if [ $iteration -eq 0 ]; then
-        other_children=$(for relevant_child in $relevant_other_children; do pgrep -P $kill_pid $relevant_child; done)
-        if [ "$other_children" != "" ]; then
-            ret=1
-        fi
-    fi
-
-    if [ $need_stop -eq 1 ]; then
-        for pid in $recursive_children $recursive_promote_children; do
-            kill -SIGSTOP $pid &> /dev/null
-        done
-    fi
-
-    for pid in $recursive_children; do
-        kill_descendents "$pid" $kill_all $need_stop $((iteration + 1))
-    done
-    for pid in $recursive_promote_children; do
-        kill_descendents "$pid" 1 1 $((iteration + 1))
-    done
-
-    # echo "kill: $recursive_children $recursive_promote_children"
-    for pid in $recursive_children $recursive_promote_children; do
-        kill $pid &> /dev/null
-        rc=$?
-        if [ $need_stop -eq 1 ]; then
-            kill -SIGCONT $pid &> /dev/null
-        fi
-        if [ $rc -eq 0 ] && [ $iteration -eq 0 ]; then
-            wait $pid
-        fi
-    done
-
-    # echo "kill: $other_children"
-    for pid in $other_children; do
-        kill $pid &> /dev/null
-        rc=$?
-        if [ $rc -eq 0 ] && [ $iteration -eq 0 ]; then
-            wait $pid
-        fi
-    done
-
-    return $ret
-}
-
-function my_exit_n() {
-    local need_mock_cleanup
-    # echo "$BASHPID: $ME: my_exit: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    # echo "$BASHPID: $ME: my_exit: waiting"
-    wait
-    # echo "$BASHPID: $ME: my_exit: wait complete"
-    # echo "$BASHPID: $ME: my_exit: need_mock_cleanup=$need_mock_cleanup"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_all
-    fi
-}
-
-function my_exit() {
-    local need_mock_cleanup
-    # echo "$BASHPID: $ME: my_exit: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    # echo "$BASHPID: $ME: my_exit: waiting"
-    wait
-    # echo "$BASHPID: $ME: my_exit: wait complete"
-    # echo "$BASHPID: $ME: my_exit: need_mock_cleanup=$need_mock_cleanup"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        sleep 1
-    fi
-    umount_mock_root_as_tmpfs_all
-}
-
-function my_sigint_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sigint_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sigint_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigint_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigint_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sighup_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sighup_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sighup_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sighup_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sighup_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sigabrt_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sigabrt_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sigabrt_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigabrt_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigabrt_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sigterm_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sigterm_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sigterm_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigterm_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigterm_n: wait complete"
-    echo "$BASHPID: $ME: my_sigterm_n: need_mock_cleanup=$need_mock_cleanup"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sigint() {
-    echo "$BASHPID: $ME: my_sigint: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigterm_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigterm_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_all
-    fi
-    exit 1
-}
-
-function my_sighup() {
-    echo "$BASHPID: $ME: my_sighup: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sighup: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sighup: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_all
-    fi
-    exit 1
-}
-
-function my_sigabrt() {
-    echo "$BASHPID: $ME: my_sigabrt: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigabrt: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigabrt: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_all
-    fi
-    exit 1
-}
-
-function my_sigterm() {
-    echo "$BASHPID: $ME: my_sigterm: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigterm: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigterm: wait complete"
-    echo "$BASHPID: $ME: my_sigterm: need_mock_cleanup=$need_mock_cleanup"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_all
-    fi
-    exit 1
-}
-
-trapwrap() {
-    local WCMD=$1
-    shift
-    declare -i pid status=255
-    # set the trap for the foreground process
-    trap my_sigint INT
-    trap my_sighup HUP
-    trap my_sigabrt ABRT
-    trap my_sigterm TERM
-    # run the command in background
-    ### "$@" & pid=$!
-    WARGS=()
-    x=0
-    for i in "$@"; do
-        WARGS[$x]="$i"
-        x=$((x+1))
-    done
-    echo "$WCMD ${WARGS[@]/#/}"
-    $WCMD "${WARGS[@]/#/}" & pid=$!
-    # wait until bg command finishes, handling interruptions by trapped signals
-    while (( status > 128 )); do
-        wait $pid
-        status=$?
-    done
-    # restore the trap
-    trap - INT
-    trap - HUP
-    trap - ABRT
-    trap - TERM
-    # return the command exit status
-    return $status
-}
-
-trapwrap_n() {
-    local ARG=$1
-    shift
-    local WCMD=$1
-    shift
-    declare -i pid status=255
-    # set the trap for the foreground process
-    trap my_exit_n EXIT
-    trap "my_sigint_n $ARG" INT
-    trap "my_sighup_n $ARG" HUP
-    trap "my_sigabrt_n $ARG" ABRT
-    trap "my_sigterm_n $ARG" TERM
-    # run the command in background
-    WARGS=()
-    x=0
-    for i in "$@"; do
-        WARGS[$x]="$i"
-        x=$((x+1))
-    done
-    echo "$WCMD ${WARGS[@]/#/}"
-    $WCMD "${WARGS[@]/#/}" & pid=$!
-    # wait until bg command finishes, handling interruptions by trapped signals
-    while (( status > 128 )); do
-        wait $pid
-        status=$?
-    done
-    # restore the trap
-    trap - INT
-    trap - HUP
-    trap - ABRT
-    trap - TERM
-    # return the command exit status
-    return $status
-}
-
-trap my_exit EXIT
-
-mock_get_cache_dir () {
-      local CFG=$1
-      local CACHE_DIR="$MY_WORKSPACE/cache"
-      local CACHE_LINE=$(grep "config_opts[[][']cache_topdir['][]]" $CFG)
-      if [ $? -eq 0 ]; then
-         CACHE_DIR=$(echo "$CACHE_LINE" | awk -F \' '{ print $4 }')
-      fi
-      echo "$CACHE_DIR"
-}
-
-mock_get_root_dir () {
-      local CFG=$1
-      local ROOT_DIR="$MY_WORKSPACE/mock"
-      local ROOT_LINE=$(grep "config_opts[[][']root['][]]" $CFG)
-      if [ $? -eq 0 ]; then
-         ROOT_DIR="$MY_WORKSPACE/"$(echo "$ROOT_LINE" | awk -F \' '{ print $4 }')
-      fi
-      echo "$ROOT_DIR"
-}
-
-mock_clean_cfg () {
-      local CFG=$1
-      echo "${FUNCNAME[0]}: $CFG"
-      echo "=================================="
-      mock_clean_cache_cfg $CFG
-      echo "=================================="
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all
-      echo "=================================="
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --clean"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --clean
-      ### Note:  this sometimes leaves behind a $MY_WORKSPACE/cache/mock/yum_cache/yumcache.lock
-      echo "=================================="
-      mock_clean_cache_all_cfg $CFG
-      echo "=================================="
-}
-
-mock_sub_configs () {
-   find $MY_WORKSPACE/configs/$MY_BUILD_ENVIRONMENT* -name "$MY_BUILD_ENVIRONMENT*b[0-9]*.cfg"
-}
-
-mock_clean () {
-   echo "${FUNCNAME[0]}: in"
-   echo "=================================="
-   remove_mock_symlinks $MY_BUILD_CFG
-   set_mock_symlinks $MY_BUILD_CFG
-   echo "=================================="
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_clean_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "=================================="
-   remove_mock_symlinks $MY_BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_partial_clean_cfg () {
-   local CFG=$1
-   local SRPMS_LIST="$2"
-   local RPMS_LIST="$3"
-   local CMD
-   local TMP
-   local RC
-
-   echo "${FUNCNAME[0]}: CFG=$CFG  SRPMS_LIST='$SRPMS_LIST'  RPMS_LIST='$RPMS_LIST'"
-
-   TMP=$(mktemp /tmp/mock_partial_clean_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-
-   if [ -d $ROOT_DIR/root/builddir/build/SOURCES ]; then
-      echo "rm -rf $ROOT_DIR/root/builddir/build/SOURCES/*"
-      \rm -rf $ROOT_DIR/root/builddir/build/SOURCES/* 2>> /dev/null
-   fi
-
-   if [ -d $ROOT_DIR/root/builddir/build/SPECS ]; then
-      echo "rm -rf $ROOT_DIR/root/builddir/build/SPECS/*"
-      \rm -rf $ROOT_DIR/root/builddir/build/SPECS/* 2>> /dev/null
-   fi
-
-   for s in $SRPMS_LIST; do
-      f=$(basename $s)
-      if [ -f $ROOT_DIR/root/builddir/build/SRPMS/$f ]; then
-         \rm -f -v $ROOT_DIR/root/builddir/build/SRPMS/$f 2>> /dev/null
-      fi
-      if [ -f $ROOT_DIR/root/builddir/build/originals/$f ]; then
-         \rm -f -v $ROOT_DIR/root/builddir/build/originals/$f 2>> /dev/null
-      fi
-   done
-
-   for r in $RPMS_LIST; do
-      for d in $(find $ROOT_DIR/root/builddir/build/BUILD/ -maxdepth 1 -name '$r*' 2>> /dev/null); do
-         echo "rm -rf $d"
-         \rm -rf $d 2>> /dev/null
-      done
-      if [ -d $ROOT_DIR/root/builddir/build/RPMS ]; then
-         for f in $(find $ROOT_DIR/root/builddir/build/RPMS -maxdepth 1 -name "$r*rpm" 2>> /dev/null); do
-            \rm -f -v $f 2>> /dev/null
-         done
-      fi
-   done
-
-
-   local NO_CLEAN_LIST=$(create-no-clean-list)
-   echo "NO_CLEAN_LIST=$NO_CLEAN_LIST"
-
-   local RPMS_CLEAN_LIST=""
-   local NEED_FULL_MOCK_CLEAN=0
-   for r in $RPMS_LIST; do
-       if ! str_lst_contains $r "$NO_CLEAN_LIST" ; then
-           RPMS_CLEAN_LIST=$(join_by ' ' $RPMS_CLEAN_LIST $r)
-       else
-           echo "Can't remove '$r' from mock environment without a wipe";
-           NEED_FULL_MOCK_CLEAN=1
-       fi
-   done
-
-   if [ $NEED_FULL_MOCK_CLEAN -eq 1 ]; then
-       echo "Wipe the mock environment"
-       mock_clean_cfg $CFG
-       RC=$?
-   else
-       # Intent of following is for $RPMS_LIST to be expand now while the remaining $ varaibles are for bash inside mock to expand
-       echo "Try to uninstall from the mock environment these packages: $RPMS_CLEAN_LIST"
-       CMD='LST="'$RPMS_CLEAN_LIST'";
-            DELETE_LIST="";
-            for r in $LST; do
-                  FOUND=$(rpm  -q $r) ;
-                  if [ $? -eq 0 ]; then
-                     DELETE_LIST="$DELETE_LIST $FOUND";
-                  fi;
-            done;
-            echo "uninstalling these packages: $DELETE_LIST";
-            if [ "$DELETE_LIST" != "" ]; then
-                rpm  -e --nodeps $DELETE_LIST;
-            fi'
-       echo "$MOCK --root $CFG --configdir $(dirname $CFG) --chroot bash -c $CMD" &> $TMP
-       trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --chroot "bash -c '$CMD'" &>> $TMP
-       RC=$?
-       if [ $RC -ne 0 ]; then
-           cat $TMP
-           \rm -f $TMP
-           return $RC
-       fi
-
-       mock_clean_cache_cfg $CFG
-       RC=$?
-       \rm -f $TMP
-   fi
-
-   return $RC
-}
-
-mock_partial_clean () {
-   local SRPMS_LIST="$1"
-   local RPMS_LIST="$2"
-   echo "${FUNCNAME[0]}: in"
-   echo "${FUNCNAME[0]}: '$SRPMS_LIST'  '$RPMS_LIST'"
-   echo "=================================="
-   local NO_CLEAN_LIST=$(create-no-clean-list)
-   echo "=================================="
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_partial_clean_cfg $SUB_CFG "$SRPMS_LIST" "$RPMS_LIST" 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "=================================="
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_clean_cache_cfg () {
-   local CFG=$1
-   local TMP
-   local RC
-
-   echo "${FUNCNAME[0]}: $CFG  '$SRPMS_LIST'  '$RPMS_LIST'"
-
-   TMP=$(mktemp /tmp/mock_clean_cache_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-
-   echo "${FUNCNAME[0]}: $CFG"
-
-   clean_yum_cache_cfg $CFG
-
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --scrub=root-cache --scrub=yum-cache --scrub=cache" &> $TMP
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --scrub=root-cache --scrub=yum-cache --scrub=cache &>> $TMP
-   RC=$?
-   if [ $RC -ne 0 ]; then
-      cat $TMP
-   fi
-
-   \rm -f $TMP
-   return $RC
-}
-
-mock_clean_cache () {
-   echo "${FUNCNAME[0]}: in"
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_clean_cache_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   # mock_clean_cache_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_clean_cache_all_cfg () {
-   local CFG=$1
-
-   echo "${FUNCNAME[0]}: $CFG"
-   echo "=================================="
-   clean_yum_cache_cfg $CFG
-   echo "=================================="
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all"
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all
-   echo "=================================="
-}
-
-mock_clean_cache_all () {
-   echo "${FUNCNAME[0]}: in"
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_clean_cache_all_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_clean_metadata_cfg () {
-   local CFG=$1
-   local TMP
-   local RC
-
-   echo "${FUNCNAME[0]}: $CFG"
-
-   TMP=$(mktemp /tmp/mock_partial_clean_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-
-   #
-   # From mock config, extract the embedded yum/dnf config.
-   # Then extract the repo definitions,
-   # and convert to a series of yum commands to clean the 
-   # metadata one repo at a time.   e.g.
-   # CMD="yum --disablerepo=* --enablerepo=StxCentos7Distro clean metadata; \
-   #      yum --disablerepo=* --enablerepo=StxCentos7Distro-rt clean metadata;
-   #      ...
-   #     "
-   #
-   CMD=$((grep -e config_opts\\[\'yum.conf\'\\\] $CFG \
-               -e config_opts\\[\'dnf.conf\'\\\] $CFG | \
-          sed 's#\\n#\n#g') | \
-         grep '^[[]' | \
-         grep -v main | \
-         sed -e 's/[][]//g' -e "s#^#${PKG_MANAGER} --disablerepo=* --enablerepo=#" -e 's#$# clean metadata#' | \
-         sort -u | \
-         tr '\n' ';')
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --chroot bash -c $CMD" &> $TMP
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --chroot "bash -c '($CMD)'" &>>$TMP
-   RC=$?
-   if [ $RC -ne 0 ]; then
-      cat $TMP
-   fi
-   \rm -f $TMP
-   return $RC
-}
-
-mock_clean_metadata () {
-   echo "${FUNCNAME[0]}: in"
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_clean_metadata_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "${FUNCNAME[0]}: out"
-}
-
-update_cgcs_repo () {
-   local REPO_NAME=$1
-   (
-    cd $MY_REPO/$REPO_NAME/
-
-    local CURR_HEAD=$(git rev-parse HEAD)
-    local LAST_HEAD_FILE="$MY_REPO/$REPO_NAME/.last_head"
-    local LAST_HEAD_FILE_OLD="$MY_WORKSPACE/$REPO_NAME.last_head"
-    local CHANGED
-    local NEW_UNTRACKED
-    local NEED_REBUILD
-    local NEED_MOCK_CLEAN=0
-    local d
-
-    if [ -f LAST_HEAD_FILE_OLD -a ! -f LAST_HEAD_FILE ]; then
-       \cp LAST_HEAD_FILE_OLD LAST_HEAD_FILE
-    fi
-
-    local LAST_HEAD=$(cat $LAST_HEAD_FILE | head -n 1)
-
-    for d in "Binary" "Source"; do
-       NEED_REBUILD=0
-       if [ ! -d $d/repodata ]; then
-          NEED_REBUILD=1
-       fi
-       if [ "$CURR_HEAD" != "$LAST_HEAD" ]; then
-          NEED_REBUILD=1
-       fi
-
-       CHANGED=$(git diff --name-only | grep $d)
-       if [ "x$CHANGED" != "x" ]; then
-          NEED_REBUILD=1
-       fi
-
-       NEW_UNTRACKED=$(git ls-files . --exclude-standard --others | grep $d)
-       if [ "x$NEW_UNTRACKED" != "x" ]; then
-          NEED_REBUILD=1
-       fi
-
-       if [ $NEED_REBUILD -eq 1 ]; then
-          NEED_MOCK_CLEAN=1
-          echo ""
-          echo "Need to recreate $REPO_NAME/$d/repodata"
-          mkdir -p $d
-
-          if [ -d $d/repodata ]; then
-             update_repodata "$d"
-          else
-             recreate_repodata "$d"
-          fi
-
-          create_lst "$d"
-       fi
-    done
-    echo "$CURR_HEAD" > $LAST_HEAD_FILE
-    \cp $LAST_HEAD_FILE $LAST_HEAD_FILE_OLD
-    if [ $NEED_MOCK_CLEAN -eq 1 ]; then
-      echo ""
-      echo "Need to clean mock"
-      mock_clean
-      set_mock_symlinks $MY_BUILD_CFG
-    fi
-   )
-}
-
-mock_clean_mounts_dir () {
-   local MOUNT=$1
-   local RC
-
-   if [ "$MOUNT" == "" ]; then
-      return 1
-   fi
-   mount | grep "$MOUNT" >> /dev/null
-   if [ $? -eq 0 ]; then
-      RC=1
-      which mock_cache_umount >> /dev/null
-      if [ $? -eq 0 ]; then
-         echo "umount '$MOUNT'"
-         mock_cache_umount "$MOUNT"
-         if [ $? -eq 0 ]; then
-            RC=0
-         fi
-      fi
-      if [ $RC -eq 1 ]; then
-         echo "ERROR: Directory '$MOUNT' is already mounted and will cause a build failure within mock."
-         echo "Ask your system administrator to umount '$MOUNT'."
-         exit 1
-      fi
-   fi
-   return 0
-}
-
-mock_clean_mounts_cfg () {
-   local CFG=$1
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-   local YUM_CACHE_MOUNT=$(readlink -f "$ROOT_DIR/root/var/cache/yum")
-   local PROC_MOUNT=$(readlink -f "$ROOT_DIR/root/proc")
-   local SYS_MOUNT=$(readlink -f "$ROOT_DIR/root/sys")
-   local SHM_MOUNT=$(readlink -f "$ROOT_DIR/root/dev/shm")
-   local PTS_MOUNT=$(readlink -f "$ROOT_DIR/root/dev/pts")
-   local MOUNT
-
-   echo "${FUNCNAME[0]}: $CFG"
-   for MOUNT in "$YUM_CACHE_MOUNT" "$PROC_MOUNT" "$SYS_MOUNT" "$SHM_MOUNT" "$PTS_MOUNT"; do
-      mock_clean_mounts_dir "$MOUNT"
-   done
-}
-
-mock_clean_mounts () {
-   echo "${FUNCNAME[0]}: in"
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_clean_mounts_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "${FUNCNAME[0]}: out"
-}
-
-clean_yum_cache_cfg () {
-   local CFG=$1
-   local CACHE_DIR=$(mock_get_cache_dir $CFG)
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-   local RC=0
-
-   echo "${FUNCNAME[0]}: $CFG"
-
-   local YUM_CACHE_MOUNT=$(readlink -f "$ROOT_DIR/root/var/cache/yum")
-   local YUM_CACHE_LOCK="$CACHE_DIR/mock/yum_cache/yumcache.lock"
-   # echo "clean_yum_cache YUM_CACHE_MOUNT='$YUM_CACHE_MOUNT' YUM_CACHE_LOCK='$YUM_CACHE_LOCK'"
-
-   if [ "$YUM_CACHE_MOUNT" != "" ]; then
-      mock_clean_mounts_dir "$YUM_CACHE_MOUNT"
-   fi
-
-   if [ -f "$YUM_CACHE_LOCK" ]; then
-      RC=1
-      which mock_cache_unlock >> /dev/null
-      if [ $? -eq 0 ]; then
-         mock_cache_unlock "$YUM_CACHE_LOCK"
-         if [ $? -eq 0 ]; then
-            RC=0
-         fi
-      fi
-      if [ $RC -eq 1 ]; then
-         echo "ERROR: File '$YUM_CACHE_LOCK' exists and will cause a build failure within mock."
-         echo "Ask your system administrator to delete '$YUM_CACHE_LOCK'."
-         exit 1
-      fi
-   fi
-   return $RC
-}
-
-
-clean_yum_cache () {
-   echo "${FUNCNAME[0]}: in"
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( clean_yum_cache_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_update_cfg () {
-   local CFG=$1
-   echo "${FUNCNAME[0]}: $CFG"
-   echo "=================================="
-   set_mock_symlinks $CFG
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --update"
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --update
-   echo "=================================="
-}
-
-mock_init_cfg () {
-   local CFG=$1
-   echo "${FUNCNAME[0]}: $CFG"
-   echo "=================================="
-   set_mock_symlinks $CFG
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --init"
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --init
-   echo "=================================="
-}
-
-mock_update_or_init_cfg () {
-   local CFG=$1
-   local TMP
-   local RC
-   echo "${FUNCNAME[0]}: $CFG"
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-
-   TMP=$(mktemp /tmp/mock_update_or_init_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-   if [ -d $ROOT_DIR/root ]; then
-      echo "Updating the mock environment"
-      set_mock_symlinks $CFG
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --update"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --update  &> $TMP
-      RC=$?
-   else
-      echo "Init the mock environment"
-      set_mock_symlinks $CFG
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --init"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --init &> $TMP
-      RC=$?
-   fi
-   if [ $RC -ne 0 ]; then
-      cat $TMP
-   fi
-   \rm -f $TMP
-   return $RC
-}
-
-mock_update_or_init () {
-   echo "${FUNCNAME[0]}: in"
-   for SUB_CFG in $(mock_sub_configs); do
-      local PREFIX=$(echo $SUB_CFG | rev | cut -d . -f 2 | rev)
-      ( mock_update_or_init_cfg $SUB_CFG 2>&1 | sed "s#^#${PREFIX}: #" ; exit ${PIPESTATUS[0]} ) &
-   done
-   wait
-   echo "${FUNCNAME[0]}: out"
-}
-
-if [ "x$PROJECT" == "x" ]; then
-    echo "PROJECT environmnet variable is not defined."
-    exit 1
-fi
-
-if [ "x$SRC_BUILD_ENVIRONMENT" == "x" ]; then
-    echo "SRC_BUILD_ENVIRONMENT environmnet variable is not defined."
-    exit 1
-fi
-
-NO_DESCENDANTS=0
-NO_REQUIRED=0
-NO_AUTOCLEAN=0
-NO_BUILD_INFO=0
-HELP=0
-CLEAN_FLAG=0
-FORMAL_FLAG=0
-CAREFUL=0
-DEP_TEST_FLAG=0
-
-# read the options
-TEMP=$(getopt -o h --long parallel,std,rt,installer,containers,no-required,no-descendants,no-autoclean,no-build-info,dep-test,clean,tmpfs-clean,formal,careful,help,layer: -n "$ME" -- "$@")
-
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-export BUILD_TYPE=std
-trap my_exit EXIT
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        --careful) CAREFUL=1 ; shift ;;
-        --no-descendants) NO_DESCENDANTS=1 ; shift ;;
-        --no-required) NO_REQUIRED=1 ; shift ;;
-        --no-autoclean) NO_AUTOCLEAN=1; shift ;;
-        --no-build-info) NO_BUILD_INFO=1; shift ;;
-        --formal) FORMAL_FLAG=1; shift ;;
-        --std) BUILD_TYPE=std; shift ;;
-        --rt) BUILD_TYPE=rt; shift ;;
-        --installer) BUILD_TYPE=installer; shift ;;
-        --containers) BUILD_TYPE=containers; shift ;;
-        -h|--help) HELP=1 ; shift ;;
-        --clean) CLEAN_FLAG=1 ; shift ;;
-        --dep-test) DEP_TEST_FLAG=1 ; MAX_WORKERS=1; NO_DESCENDANTS=1; NO_REQUIRED=1; NO_BUILD_INFO=1; shift ;;
-        --tmpfs-clean) if [ -n "$MY_WORKSPACE" ]; then export MY_WORKSPACE=$MY_WORKSPACE/$BUILD_TYPE; exit 0; fi ;;
-        --parallel) shift ;;
-        --layer) export LAYER=$2 ; shift ; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error!" ; exit 1 ;;
-    esac
-done
-
-# Reset variables
-if [ -n "$MY_WORKSPACE" ]; then
-   export MY_WORKSPACE_TOP=${MY_WORKSPACE_TOP:-$MY_WORKSPACE}
-   export MY_WORKSPACE=$MY_WORKSPACE_TOP/$BUILD_TYPE
-else
-   export MY_PATCH_WORKSPACE_TOP=${MY_PATCH_WORKSPACE_TOP:-$MY_PATCH_WORKSPACE}
-   export MY_PATCH_WORKSPACE=$MY_PATCH_WORKSPACE_TOP/$BUILD_TYPE
-fi
-
-export MY_BUILD_DIR_TOP=${MY_BUILD_DIR_TOP:-$MY_BUILD_DIR}
-export MY_BUILD_DIR=$MY_BUILD_DIR_TOP/$BUILD_TYPE
-
-export MY_BUILD_ENVIRONMENT_TOP=${MY_BUILD_ENVIRONMENT_TOP:-$MY_BUILD_ENVIRONMENT}
-export MY_BUILD_ENVIRONMENT=$MY_BUILD_ENVIRONMENT_TOP-$BUILD_TYPE
-
-export MY_SRC_RPM_BUILD_DIR=$MY_BUILD_DIR/rpmbuild
-export MY_BUILD_ENVIRONMENT_FILE=$MY_BUILD_ENVIRONMENT.cfg
-export MY_BUILD_CFG=$MY_WORKSPACE/$MY_BUILD_ENVIRONMENT_FILE
-export MY_MOCK_ROOT=$MY_WORKSPACE/mock/root
-
-IMAGE_INC_FILE="${MY_WORKSPACE}/image.inc"
-image_inc_list iso std ${DISTRO} > "${IMAGE_INC_FILE}"
-
-DEV_IMAGE_INC_FILE="${MY_WORKSPACE}/image-dev.inc"
-image_inc_list iso dev ${DISTRO} > "${DEV_IMAGE_INC_FILE}"
-
-for STREAM in stable dev; do
-    WHEELS_INC_FILE="${MY_WORKSPACE}/${DISTRO}_${STREAM}_wheels.inc"
-    wheels_inc_list ${STREAM} ${DISTRO} > "${WHEELS_INC_FILE}"
-done
-
-LAST_PLATFORM_RELEASE_FILE="$MY_BUILD_DIR/.platform_release"
-
-TARGETS=$@
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ $FORMAL_FLAG -eq 1 ]; then
-   export FORMAL_BUILD=1
-fi
-
-SRC_ROOT="$MY_REPO"
-if [ "x$MY_REPO" == "x" ]; then
-   SRC_ROOT=$HOME
-fi
-
-BUILD_ROOT="$MY_WORKSPACE"
-if [ "x$MY_WORKSPACE" == "x" ]; then
-   BUILD_ROOT="$MY_PATCH_WORKSPACE"
-
-   if [ "x$MY_PATCH_WORKSPACE" == "x" ]; then
-       echo "ERROR: reqiure one of MY_WORKSPACE or MY_PATCH_WORKSPACE be defined"
-       exit 1
-   fi
-fi
-
-export BUILD_BASE="$BUILD_ROOT"
-export CCACHE_DIR="$BUILD_ROOT/.ccache"
-export RESULT_DIR="$BUILD_BASE/results"
-export SRC_BASE="$SRC_ROOT"
-export STX_BASE=$SRC_BASE/stx
-
-if [ "x$MY_SRC_RPM_BUILD_DIR" != "x" ]; then
-    RPM_BUILD_ROOT=$MY_SRC_RPM_BUILD_DIR
-else
-    RPM_BUILD_ROOT=$BUILD_BASE/rpmbuild
-fi
-
-RELEASE_INFO_FILE="$(get_release_info)"
-
-if [ -f "$RELEASE_INFO_FILE" ]; then
-   source "$RELEASE_INFO_FILE"
-else
-   echo "Warning: failed to find RELEASE_INFO_FILE=$RELEASE_INFO_FILE"
-fi
-
-if [ "x$PLATFORM_RELEASE" == "x" ]; then
-   echo "Warning: PLATFORM_RELEASE is not defined in $RELEASE_INFO_FILE"
-   PLATFORM_RELEASE="00.00"
-fi
-
-export RPM_BUILD_BASE="$RPM_BUILD_ROOT"
-export SRPM_OUT="$RPM_BUILD_BASE/SRPMS"
-export RPM_DIR="$RPM_BUILD_BASE/RPMS"
-export SPECS_DIR="$RPM_BUILD_BASE/SPECS"
-export SOURCES_DIR="$RPM_BUILD_BASE/SOURCES"
-export PLATFORM_RELEASE
-
-if [ ! -d $BUILD_BASE ]; then
-   if [ $CLEAN_FLAG -eq 1 ]; then
-       exit 0
-   fi
-   echo "ERROR: expected to find directory at '$BUILD_BASE'"
-   exit 1
-fi
-
-
-mkdir -p $RPM_BUILD_BASE
-if [ $? -ne 0 ]; then
-   echo "ERROR: Failed to create directory '$RPM_BUILD_BASE'"
-   exit 1
-fi
-
-mkdir -p $SRPM_OUT/repodata
-if [ $? -ne 0 ]; then
-   echo "ERROR: Failed to create directory '$SRPM_OUT/repodata'"
-   exit 1
-fi
-
-mkdir -p $RPM_DIR/repodata
-if [ $? -ne 0 ]; then
-   echo "ERROR: Failed to create directory '$RPM_DIR/repodata'"
-   exit 1
-fi
-
-if [ "x$MY_BUILD_CFG" == "x" ];then
-   echo "ERROR: reqiure MY_BUILD_CFG to be defined"
-   exit 1
-fi
-
-export BUILD_CFG="$MY_BUILD_CFG"
-
-# Place build-time environement variables in mock environment
-echo "FORMAL_BUILD=$FORMAL_BUILD"
-echo "modify-build-cfg $BUILD_CFG"
-${BUILD_RPMS_PARALLEL_DIR}/modify-build-cfg $BUILD_CFG
-if [ $? -ne 0 ]; then
-       echo "Could not modifiy $BUILD_CFG";
-       exit 1
-fi
-
-if [ ! -f $BUILD_CFG ]; then
-   echo "ERROR: Mock config file not found at '$BUILD_CFG'"
-   exit 1
-fi
-
-# create temp dir
-export TMPDIR="$MY_WORKSPACE/tmp"
-mkdir -p "$TMPDIR"
-
-# Create symlinks from /var/... to /localdisk/loadbuild/... if on a build server
-
-set_mock_symlinks $MY_BUILD_CFG
-
-if [ $CLEAN_FLAG -eq 1 ]; then
-    umount_mock_root_as_tmpfs_all
-fi
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-    ls $SRPM_OUT/*.src.rpm &>> /dev/null
-    if [ $? -ne 0 ]; then
-        echo "Nothing to build in '$SRPM_OUT'"
-        exit 0
-    fi
-fi
-
-ALL=0
-UNRESOLVED_TARGETS=" "
-
-if [ $DEP_TEST_FLAG -eq 1 ]; then
-    # we expect exactly one package
-    if [ $(echo $TARGETS | wc -w) -ne 1 ]; then
-        echo "ERROR: dependency testing requires exactly one package"
-        usage
-        exit 1
-    fi
-else
-    # we accept a list of packages, and no list implies all
-    if [ "x$TARGETS" == "x" ]; then
-        echo "make: all"
-        ALL=1
-    else
-        echo "make: $TARGETS"
-        UNRESOLVED_TARGETS="$TARGETS"
-    fi
-fi
-
-if [ "$BUILD_TYPE" != "std" ]; then
-    # This defines ...
-    #    STD_SRPM_PKG_NAME_TO_PATH
-    #    STD_SRPM_PKG_NAMES
-    srpm_build_std_dictionary $MY_WORKSPACE_TOP/std/rpmbuild/SRPMS
-fi
-
-# This defines ...
-#    SRPM_PKG_NAME_TO_PATH
-#    SRPM_PKG_NAMES
-srpm_build_dictionary $SRPM_OUT
-
-SRPMS_TO_COMPILE=()
-SRPMS_LIST=""
-RPMS_LIST=""
-
-clean_list () {
-   local SRPMS_LIST="$1"
-   local RPMS_LIST="$2"
-   local ALL=$3
-   local TARGET
-   local b
-   local d
-   local f
-   local n
-   local p
-   local r
-   local s
-   local sn
-   local t
-   local SPEC_DIR
-
-   echo "${FUNCNAME[0]}: '$SRPMS_LIST'  '$RPMS_LIST'  '$ALL'"
-   if [ $ALL -eq 1 ]; then
-       for r in $(find $RPM_DIR -name "*.rpm"); do
-           \rm -f -v $r
-       done
-
-       if [ $CLEAN_FLAG -eq 1 ]; then
-          for d in $(find $SPECS_DIR -type d); do
-             echo "rm -rf $d"
-              \rm -rf "$d" 2>> /dev/null
-          done
-       fi
-
-       for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -type d 2>> /dev/null); do
-           echo "rm -rf $d"
-           \rm -rf "$d" 2>> /dev/null
-       done
-   else
-       for s in $SRPMS_LIST; do
-           (
-           SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-           sn=$(rpm_get_name $s)
-           update_spec_cache $s
-
-           TARGET=$(rpm -qp --qf '%{NAME}-%{VERSION}\n' "$s")
-           for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -name "$TARGET*" 2>> /dev/null); do
-               echo "rm -rf $d"
-               \rm -rf "$d" 2>> /dev/null
-           done
-
-           for p in $(ls -1 $SPEC_DIR/BUILDS); do
-               for r in $(find $RESULT_DIR/$USER-* $RPM_DIR -name "$p-*.rpm" 2>> /dev/null); do
-                   if [ -f $r ]; then
-                       n=$(rpm_get_name $r)
-                       if [ "$n" == "$p" ]; then
-                          if [[ "$r" == *.src.rpm ]]; then
-                              if [ "$n" != "$sn" ]; then
-                                 continue
-                              fi
-
-                              TARGET=$(rpm -qp --qf '%{NAME}-%{VERSION}\n' "$r")
-                              for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -name "$TARGET*" 2>> /dev/null); do
-                                  echo "rm -rf $d"
-                                  \rm -rf "$d" 2>> /dev/null
-                              done
-
-                          else
-                              rs=$(rpm_get_srpm $r)
-                              if [[ "$rs" != "$sn"-[0-9]* ]]; then
-                                  continue
-                              fi
-                          fi
-
-                          \rm -f -v $r
-                       fi
-                   fi
-               done
-           done
-
-           TARGET=$(rpm -qp --qf '%{NAME}-%{VERSION}\n' "$s")
-
-           if [ $CLEAN_FLAG -eq 1 ]; then
-               for d in $(find $SPECS_DIR -type d -name "$TARGET*" 2>> /dev/null); do
-                   echo "rm -rf $d"
-                    \rm -rf "$d" 2>> /dev/null
-               done
-           fi
-
-           for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -name "$TARGET*" 2>> /dev/null); do
-               echo "rm -rf $d"
-               \rm -rf "$d" 2>> /dev/null
-           done
-           ) &
-       done
-       echo "waiting on file deletion"
-       wait
-       echo "wait complete"
-   fi
-
-   echo ""
-   echo "Cleaning repodata"
-   for d in $(find -L  $MY_WORKSPACE/rpmbuild $MY_WORKSPACE/results   -type d -name repodata); do
-      recreate_repodata $(dirname $d)
-      create_lst $(dirname $d)
-   done
-
-   echo ""
-   echo "Cleaning mock environment"
-   echo ""
-
-   if [ $ALL -eq 1 ]; then
-       # Wipe everything
-       if [ "x$RPM_DIR" != "x" ]; then
-           \rm -rf -v $RPM_DIR/* 2>> /dev/null
-       fi
-
-       \rm -f -v $RESULT_DIR/mockchain.log 2>> /dev/null
-       mock_clean
-   else
-       # If dependency test
-       if [ $DEP_TEST_FLAG -eq 1 ]; then
-           mock_clean
-       else
-           # Wipe only traces of what we built
-           mock_partial_clean "$SRPMS_LIST" "$RPMS_LIST"
-       fi
-   fi
-}
-
-echo "ALL=$ALL"
-(
-trap my_exit EXIT
-trap my_sigint INT
-trap my_sighup HUP
-echo "$CMDLINE"
-echo "ALL=$ALL"
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-  if [ -d $RESULT_DIR ]; then
-    # in case previous build recieved a ctrl-C and didn't get a change to copy it's successful work into RPM_DIR
-    for d in $(find $RESULT_DIR -name '*.rpm' | grep -v '[.]src[.]rpm' | xargs --no-run-if-empty --max-args=1 dirname | sort -u); do
-        rsync -u $d/*.rpm $RPM_DIR
-    done
-    for d in $(find -L $RESULT_DIR  -type d -name repodata); do
-       update_repodata $(dirname $d)
-    done
-  fi
-fi
-
-spec_cache_dir_from_srpm () {
-   local SRPM=${1}
-   local SPEC_DIR=$(echo $SRPM | sed 's#/SRPMS/#/SPECS/#')
-   echo "$SPEC_DIR"
-}
-
-update_spec_cache () {
-   local SRPM=${1}
-   local SPEC_DIR=$(spec_cache_dir_from_srpm $SRPM)
-   local NEED_UPDATE=0
-
-   if [ ! -d $SPEC_DIR ]; then
-      mkdir -p  $SPEC_DIR
-      NEED_UPDATE=1
-   else
-      find "$SPEC_DIR" -name '*.spec' | grep 'spec' >> /dev/null
-      if [ $? -ne 0 ]; then
-         # No spec file
-         NEED_UPDATE=1
-      fi
-
-      find "$SPEC_DIR" -not -newermm "$SRPM" -name '*.spec' | grep -q 'spec'
-      if [ $? -eq 0 ]; then
-         # spec is older than src.rpm
-         NEED_UPDATE=1
-      fi
-   fi
-
-   if [ $NEED_UPDATE -ne 0 ]; then
-      (
-      cd $SPEC_DIR
-      \rm -rf BUILDS BUILDS_VR *.spec 2>> /dev/null
-      mkdir -p BUILDS
-      mkdir -p NAMES
-      mkdir -p SERVICES
-      mkdir -p BUILDS_VR
-      rpm2cpio $SRPM | cpio -civ '*.spec'
-      if [ $? -ne 0 ]; then
-         echo "ERROR: no spec file found in '$SRPM'"
-      fi
-      for f in $(find . -name '*.spec' | sort -V); do
-         touch $f
-         for p in $(spec_list_ver_rel_packages $f); do
-            touch "BUILDS_VR/$p"
-         done
-         for p in $(spec_list_packages $f); do
-            touch "BUILDS/$p"
-         done
-         for p in $(spec_find_tag Name $f 2>> /dev/null); do
-            touch "NAMES/$p"
-         done
-         for p in $(spec_find_global service $f 2>> /dev/null); do
-            touch "SERVICES/$p"
-         done
-      done
-      )
-   fi
-}
-
-# Find the list of packages we must compile
-
-echo "Find the list of packages we must compile"
-
-mkdir -p $MY_WORKSPACE/tmp/
-NEED_BUILD_DIR=$(mktemp -d $MY_WORKSPACE/tmp/$USER-$ME-need-build-XXXXXX)
-if [ $? -ne 0 ] || [ "x$NEED_BUILD_DIR" == "x" ]; then
-    echo "Failed to create temp directory under $MY_WORKSPACE/tmp"
-    exit 1
-fi
-
-UNRESOLVED_TARGETS_DIR=$(mktemp -d $MY_WORKSPACE/tmp/$USER-$ME-unresolved-XXXXXX)
-if [ $? -ne 0 ] || [ "x$UNRESOLVED_TARGETS_DIR" == "x" ]; then
-    echo "Failed to create temp directory under $MY_WORKSPACE/tmp"
-    exit 1
-fi
-
-for n in ${UNRESOLVED_TARGETS}; do
-    touch $UNRESOLVED_TARGETS_DIR/$n
-done
-
-PLATFORM_RELEASE_CHANGED=0
-if [ -f $LAST_PLATFORM_RELEASE_FILE ]; then
-    LAST_PLATFORM_RELEASE=$(cat $LAST_PLATFORM_RELEASE_FILE)
-    if [ "$LAST_PLATFORM_RELEASE" != "$PLATFORM_RELEASE" ]; then
-        PLATFORM_RELEASE_CHANGED=1
-    fi
-else
-    PLATFORM_RELEASE_CHANGED=1
-fi
-
-for n in "${SRPM_PKG_NAMES[@]}"; do
-    (
-    s=${SRPM_PKG_NAME_TO_PATH[$n]}
-    SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-    update_spec_cache $s
-    # echo "$BASHPID: considering $n: $s, SPEC_DIR=$SPEC_DIR"
-    NEED_BUILD=0
-
-    if [ "x$TARGETS" == "x" ]; then
-        # We weren't given a list of build targets.
-        # Build anything missing or out of date.
-        NEED_BUILD=0
-        BN=$(basename ${s//.src.rpm/})
-
-        if [ -f $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/fail ]; then
-            echo "Found: $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/fail"
-            echo "Previous build of $BN failed"
-            NEED_BUILD=1
-        elif [ ! -f $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/success ]; then
-            echo "Not Found: $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/success"
-            echo "No previous build of $BN"
-            NEED_BUILD=1
-        else
-            LOCAL_RPMS_VRA_LIST=$(ls -1 $SPEC_DIR/BUILDS_VR | tr '\n' ' ')
-
-            for f in $LOCAL_RPMS_VRA_LIST; do
-                m=$(find $RPM_DIR/$f*rpm 2>> /dev/null | wc -l)
-                if [ $m -eq 0 ] && [ -f "$UNBUILT_PATTERN_FILE" ]; then
-                    echo $f | grep -f "$UNBUILT_PATTERN_FILE" >> /dev/null && m=1
-                    if [ $m -eq 1 ]; then
-                       echo "Excluding '$f' due to match in UNBUILT_PATTERN_FILE '$UNBUILT_PATTERN_FILE'"
-                       if [ -f "$IMAGE_INC_FILE" ] ; then
-                          for t in $(grep -v '^#' "$IMAGE_INC_FILE"); do
-                             ii=$(echo $f | grep "^$t-[0-9]" | wc -l)
-                             if [ $ii -gt 0 ]; then
-                                echo "Including '$f' due to match in IMAGE_INC_FILE '$IMAGE_INC_FILE' due to pattern '^$t-[0-9]'"
-                                m=0
-                                break
-                             fi
-                          done
-                       fi
-                    fi
-                fi
-
-                newer=$(find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l)
-                # echo "$m  $newer=find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l"
-                if [ $m -eq 0 ] || [ $newer -gt 0 ] || [ $CLEAN_FLAG -eq 1 ]; then
-                    if [ $newer -gt 0 ]; then
-                        echo "Including '$f' due to newer code"
-                        find $RPM_DIR/$f*rpm -type f -not -newermm $s
-                    else
-                        if [ $m -eq 0 ]; then
-                            echo "Including '$f' due to m=0"
-                        else
-                           if [ $CLEAN_FLAG -eq 1 ]; then
-                               echo "Including '$f' due to CLEAN_FLAG=1"
-                           fi
-                        fi
-                    fi
-                    NEED_BUILD=1
-                    break
-                fi
-            done
-        fi
-    else
-        # We were given a list of build targets,
-        # try to find packages matching that list.
-        NEED_BUILD=0
-        for f in $(find $SPEC_DIR/NAMES $SPEC_DIR/SERVICES $SPEC_DIR/BUILDS -type f 2>> /dev/null); do
-            b=$(basename $f)
-            for t in $TARGETS; do
-                if [[ ( "$b" == "$t" ) || ( ( "$BUILD_TYPE" == "rt" ) && ( "$b" == "$t-rt" ) ) ]]; then
-                    echo "Including named target '$f'"
-                    TARGET_FOUND=$t
-                    NEED_BUILD=1
-                    # UNRESOLVED_TARGETS=$(echo "$UNRESOLVED_TARGETS" | sed "s/\(^\|[[:space:]]\)$TARGET_FOUND\([[:space:]]\|$\)/ /g")
-                    if [ -f $UNRESOLVED_TARGETS_DIR/$TARGET_FOUND ]; then
-                        \rm -f $UNRESOLVED_TARGETS_DIR/$TARGET_FOUND
-                    fi
-                    break
-                fi
-            done
-        done
-    fi
-
-    if [ $NO_BUILD_INFO -eq 0 ]; then
-        if [ "$n" == "build-info" ]; then
-            echo "Including '$n' by default"
-            NEED_BUILD=1
-        fi
-    fi
-
-    if [ $PLATFORM_RELEASE_CHANGED -eq 1 ]; then
-        grep '%{platform_release}' $SPEC_DIR/*.spec >> /dev/null
-        if [ $? -eq 0 ]; then
-            echo "Including '$n' due to changed platform_release"
-            NEED_BUILD=1
-        fi
-    fi
-
-    if [ $NEED_BUILD -eq 1 ]; then
-        echo "found $n: $s"
-        touch "$NEED_BUILD_DIR/$n"
-        # SRPMS_TO_COMPILE+=("$n")
-    fi
-    ) &
-done
-echo "waiting"
-wait
-for n in $(ls -1 $NEED_BUILD_DIR); do
-    SRPMS_TO_COMPILE+=("$n")
-done
-UNRESOLVED_TARGETS=" "
-for n in $(ls -1 $UNRESOLVED_TARGETS_DIR); do
-    UNRESOLVED_TARGETS="$UNRESOLVED_TARGETS $n"
-done
-\rm -rf $NEED_BUILD_DIR
-\rm -rf $UNRESOLVED_TARGETS_DIR
-
-ORIG_SRPMS_TO_COMPILE=( ${SRPMS_TO_COMPILE[@]} )
-
-echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-
-
-# adding dependant packages
-if [ $CLEAN_FLAG -eq 0 ] && [ $NO_DESCENDANTS -eq 0 ] && [ -f $SRPM_DIRECT_DESCENDANTS_FILE ]; then
-   echo
-   echo "adding dependant packages"
-
-   # This array will accumulate a list of secondary build targets.
-   TRANSITIVE_SRPMS_TO_COMPILE=()
-
-   # Add packages that directly depend on the primary build targets in ORIG_SRPMS_TO_COMPILE
-   for n in ${ORIG_SRPMS_TO_COMPILE[@]}; do
-       needs=( $(grep "^$n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$n;//" | sed 's/,/ /g'; alt_n=$(echo "$n" | sed 's#-rt$##'); if [ "$alt_n" != "$n" ]; then grep "^$alt_n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$alt_n;//" | sed 's/,/ /g' | sed 's#\([^[:space:]]*\)#\1-rt#g'; fi ) )
-
-       # intersection of 'needs' and 'SRPM_PKG_NAMES' ... i.e. what should be compiled that we have source for
-       compilable_needs=( $(intersection needs SRPM_PKG_NAMES) )
-       TRANSITIVE_SRPMS_TO_COMPILE=( $(union compilable_needs TRANSITIVE_SRPMS_TO_COMPILE) )
-   done
-
-   # For non-std build, and if non specific build targets are named, then search all
-   # packages that we might build and check if they require a package that DID build
-   # in the std build.  If so build the package as a secondary target, even though the
-   # primary target was from a different build_type.
-   if [ "$BUILD_TYPE" != "std" ] && [ $ALL -eq 1 ] && [ -f $SRPM_TO_RPM_MAP_FILE ] && [ -f $SRPM_RPM_DIRECT_REQUIRES_FILE ]; then
-       # Test all that we can build ...
-       for n in ${SRPM_PKG_NAMES[@]}; do
-           contains ORIG_SRPMS_TO_COMPILE $n
-           if [ $? -eq 0 ]; then
-               # Already on the primary build list, skip it.
-               echo "skip $n"
-               continue
-           fi
-
-           STD_NEEDS_BUILD=0
-
-           # Iterate over all binary rpms names produce by the candidate package
-           for b in $(grep "^$n;" "$SRPM_TO_RPM_MAP_FILE" | sed "s/$n;//" | sed 's/,/ /g'); do
-               # find an rpm file with the rpm name we seek
-               for bp in $(find $RPM_DIR -name "$b-[0-9]*.rpm" | grep -v '.src.rpm'); do
-                   if [ "$b" != "$(rpm_get_name $bp)" ]; then
-                       # rpm name doesn't match
-                       continue
-                   fi
-
-                   # Iterate over binary rpms names required by the candidate package
-                   for r in $(grep "^$n;" "$SRPM_RPM_DIRECT_REQUIRES_FILE" | sed "s/$n;//" | sed 's/,/ /g'); do
-                       if [ $r == $n ]; then
-                           # Ignore self dependency
-                           continue
-                       fi
-
-                       # find a required rpm file with the rpm name we seek, AND is newer than the produced rpm file
-                       for rp in $(find $(echo $RPM_DIR | sed "s#/$BUILD_TYPE/#/std/#") -name "$r-[0-9]*.rpm" -newermm $bp | grep -v '.src.rpm'); do
-                           if [ "$r" != "$(rpm_get_name $rp)" ]; then
-                               # rpm name doesn't match
-                               continue
-                           fi
-
-                           # Ok, a required rpm is newer than a built rpm, we should rebuild!
-                           echo "rebuild '$n' due to newer '$r'"
-                           STD_NEEDS_BUILD=1
-                           break
-                       done
-                   done
-               done
-
-               # Avoid pointless processing if we already have a positive result.
-               if [ $STD_NEEDS_BUILD -eq 1 ]; then
-                   break
-               fi
-           done
-
-           if [ $STD_NEEDS_BUILD -eq 1 ]; then
-               # Compile is requires due to an updated required package in the std build.
-               # Add 'n' to array TRANSITIVE_SRPMS_TO_COMPILE.
-               TRANSITIVE_SRPMS_TO_COMPILE=( $(put TRANSITIVE_SRPMS_TO_COMPILE $n) )
-           fi
-       done
-   fi
-
-   # If the kernel or kernel-rt packages where absent from the primary build targets, but
-   # added as a secondary target, then make sure all out-of-tree kernel modules are also
-   # added.
-   for n in kernel kernel-rt; do
-       KERNEL_IN_ORIG=0
-       KERNEL_IN_TRANSITIVE=0
-       contains ORIG_SRPMS_TO_COMPILE "$n" && KERNEL_IN_ORIG=1
-       contains TRANSITIVE_SRPMS_TO_COMPILE "$n" && KERNEL_IN_TRANSITIVE=1
-       if [ $KERNEL_IN_TRANSITIVE -eq 1 ] && [ $KERNEL_IN_ORIG -eq 0 ]; then
-           needs=( $(grep "^$n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$n;//" | sed 's/,/ /g'; alt_n=$(echo "$n" | sed 's#-rt$##'); if [ "$alt_n" != "$n" ]; then grep "^$alt_n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$alt_n;//" | sed 's/,/ /g' | sed 's#\([^[:space:]]*\)#\1-rt#g'; fi ) )
-
-           # intersection of 'needs' and 'SRPM_PKG_NAMES' ... i.e. what should be compiled that we have source for
-           compilable_needs=( $(intersection needs SRPM_PKG_NAMES) )
-           TRANSITIVE_SRPMS_TO_COMPILE=( $(union compilable_needs TRANSITIVE_SRPMS_TO_COMPILE) )
-       fi
-   done
-
-   # Append the secondary targetc list to the primary list
-   SRPMS_TO_COMPILE=( $(union SRPMS_TO_COMPILE TRANSITIVE_SRPMS_TO_COMPILE) )
-   echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-fi
-
-
-MUST_SRPMS_TO_COMPILE=( ${SRPMS_TO_COMPILE[@]} )
-
-# adding required packages
-if [ $CLEAN_FLAG -eq 0 ] && [ "x$TARGETS" != "x" ] && [ $NO_REQUIRED -eq 0 ] && [ -f $SRPM_TRANSITIVE_REQUIRES_FILE ]; then
-   echo
-   echo "adding required packages"
-   TRANSITIVE_SRPMS_TO_COMPILE=()
-   for n in ${MUST_SRPMS_TO_COMPILE[@]}; do
-       needs=( $(grep "^$n;" "$SRPM_TRANSITIVE_REQUIRES_FILE" | sed "s/$n;//" | sed 's/,/ /g') )
-
-       # intersection of 'needs' and 'SRPM_PKG_NAMES' ... i.e. what should be compiled that we have source for
-       compilable_needs=( $(intersection needs SRPM_PKG_NAMES) )
-       TRANSITIVE_SRPMS_TO_COMPILE=( $(union compilable_needs TRANSITIVE_SRPMS_TO_COMPILE) )
-
-       for b in "${un[@]}"; do
-          echo $b
-       done
-   done
-
-   SRPMS_TO_COMPILE=( $(union TRANSITIVE_SRPMS_TO_COMPILE SRPMS_TO_COMPILE) )
-   echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-fi
-
-
-# Determine build order ... now done in mockchain4
-SRPMS_TO_COMPILE=( $(echo ${SRPMS_TO_COMPILE[@]} | sed 's/ /\n/g' | sort -u) )
-
-
-# convert pkg names to paths, clean work dirs if needed
-echo
-echo "Mapping packages to src rpm paths"
-for n in ${SRPMS_TO_COMPILE[@]}; do
-    s=${SRPM_PKG_NAME_TO_PATH[$n]}
-    SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-    update_spec_cache $s
-
-    SRPMS_LIST="$SRPMS_LIST $s"
-    # echo "SRPMS_LIST = $SRPMS_LIST"
-
-    TMP_RPMS_LIST=$(ls -1 $SPEC_DIR/BUILDS | tr '\n' ' ')
-    RPMS_LIST="$RPMS_LIST $TMP_RPMS_LIST"
-done
-echo
-
-CENTOS_REPO=centos-repo
-if [ ! -d ${MY_REPO}/${CENTOS_REPO} ]; then
-    CENTOS_REPO=cgcs-centos-repo
-    if [ ! -d ${MY_REPO}/${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-   update_cgcs_repo ${CENTOS_REPO}
-fi
-
-mock_clean_mounts
-
-# clean work dirs if needed
-CLEAN_BEFORE_BUILD_SRPM_LIST=""
-CLEAN_BEFORE_BUILD_RPM_LIST=""
-if [ $CLEAN_FLAG -eq 0 ]; then
-    echo
-    echo "Calculating minimal clean list"
-    for nm in ${SRPMS_TO_COMPILE[@]}; do
-        MUST_CLEAN=0
-        contains MUST_SRPMS_TO_COMPILE $nm && MUST_CLEAN=1
-
-        s=${SRPM_PKG_NAME_TO_PATH[$nm]}
-        SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-        update_spec_cache $s
-
-        LOCAL_RPMS_LIST=$(ls -1 $SPEC_DIR/BUILDS | tr '\n' ' ')
-        LOCAL_RPMS_VRA_LIST=$(ls -1 $SPEC_DIR/BUILDS_VR | tr '\n' ' ')
-
-        for f in $LOCAL_RPMS_VRA_LIST; do
-            m=$(find $RPM_DIR/$f*rpm 2>> /dev/null | wc -l)
-            if [ -f "$UNBUILT_PATTERN_FILE" ]; then
-                echo $f | grep -f "$UNBUILT_PATTERN_FILE" >> /dev/null && m=1
-            fi
-
-            n=$(find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l)
-            # echo "$n=find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l"
-            if [ $m -eq 0 ] || [ $n -gt 0 ] || [ $MUST_CLEAN -eq 1 ]; then
-                CLEAN_BEFORE_BUILD_SRPM_LIST="$CLEAN_BEFORE_BUILD_SRPM_LIST $s"
-                CLEAN_BEFORE_BUILD_RPM_LIST="$CLEAN_BEFORE_BUILD_RPM_LIST $LOCAL_RPMS_LIST"
-                break
-            fi
-        done
-    done
-fi
-
-
-if [ "$UNRESOLVED_TARGETS" != " " ]; then
-    if [ $CLEAN_FLAG -eq 0 ]; then
-        echo ""
-        echo "ERROR: failed to resolve build targets: $UNRESOLVED_TARGETS"
-        exit 1
-    fi
-fi
-
-echo "SRPMS_LIST = $SRPMS_LIST"
-echo "RPMS_LIST = $RPMS_LIST"
-
-
-echo
-if [ $CLEAN_FLAG -eq 0 ]; then
-   # pre-create these directories as $USER,
-   # else mock will create them as root and fails to clean them.
-   # Note: keep these in sync with mockchain-parallel!
-   for i in $(seq 0 $((MAX_WORKERS-1))); do
-      mkdir -p $MY_WORKSPACE/mock/b$i
-      mkdir -p $MY_WORKSPACE/cache/b$i/mock
-   done
-
-   mock_update_or_init
-fi
-set_mock_symlinks $MY_BUILD_CFG
-
-echo
-echo "Cleaning"
-if [ $CLEAN_FLAG -eq 1 ]; then
-   # Clean what the user asked for
-   echo "========= clean_list '$SRPMS_LIST' '$RPMS_LIST' $ALL"
-   \rm -r -f -v $MY_WORKSPACE/mock-$USER-*
-   clean_list "$SRPMS_LIST" "$RPMS_LIST" "$ALL"
-
-   exit 0
-else
-   # Clean what we intend to build
-   if [ $NO_AUTOCLEAN -eq 1 ]; then
-      echo "no-autoclean was requested"
-   else
-      if [ "$CLEAN_BEFORE_BUILD_SRPM_LIST" != "" ]; then
-         echo "========= clean_list '$CLEAN_BEFORE_BUILD_SRPM_LIST' '$CLEAN_BEFORE_BUILD_RPM_LIST' 0"
-         clean_list "$CLEAN_BEFORE_BUILD_SRPM_LIST" "$CLEAN_BEFORE_BUILD_RPM_LIST" 0
-      fi
-   fi
-fi
-
-echo
-echo "Cleaning repodata"
-
-BUILD_ENVIRONMENT_DIR=$(basename $BUILD_CFG)
-BUILD_ENVIRONMENT_DIR=${BUILD_ENVIRONMENT_DIR%.*}
-LOCAL_URL=http://127.0.0.1:8088$BUILD_BASE/results/$BUILD_ENVIRONMENT_DIR/
-LOCAL_SRC_URL=http://127.0.0.1:8088$BUILD_BASE/rpmbuild/SRPMS/
-
-for d in $(find -L $RESULT_DIR  -type d -name repodata); do
-(cd $d/..
- if [ -f repodata/*comps*xml ]; then
-    \mv repodata/*comps*xml comps.xml
- fi
- \rm -rf repodata
-)
-done
-
-echo
-echo "Cleaning Metadata"
-
-MOCKCHAIN_LOG="$RESULT_DIR/mockchain.log"
-mkdir -p $RESULT_DIR
-touch $RESULT_DIR/build_start
-\rm -rf $MOCKCHAIN_LOG
-
-mock_clean_metadata
-
-echo
-echo "Building"
-
-recreate_repodata $BUILD_BASE/results/$BUILD_ENVIRONMENT_DIR
-
-CMD_PREFIX=""
-if [ -x /bin/ionice ]; then
-    CMD_PREFIX="nice -n 20 ionice -c Idle /bin/ionice "
-fi
-
-REAL_MOCKCHAIN=0
-MOCK_PASSTHROUGH="-m"
-MOCKCHAIN="mockchain-parallel"
-CHAIN_OPTION=""
-if file $(which mockchain) | grep -q 'Python script'; then
-    REAL_MOCKCHAIN=1
-fi
-
-CMD_OPTIONS="$MOCK_PASSTHROUGH --no-clean $MOCK_PASSTHROUGH --no-cleanup-after"
-if [ $CAREFUL -eq 1 ]; then
-   CMD_OPTIONS="$MOCK_PASSTHROUGH --no-cleanup-after"
-fi
-
-CMD_OPTIONS+=" $MOCK_PASSTHROUGH --enable-plugin=package_state"
-CMD_OPTIONS+=" --log=$MOCKCHAIN_LOG"
-
-echo "CAREFUL=$CAREFUL"
-
-# Sets WORKERS and MOCKCHAIN_RESOURCE_ALLOCATION
-compute_resources $SRPMS_LIST
-
-
-if [ -f $SRPM_RPM_DIRECT_REQUIRES_FILE ]; then
-    CMD_OPTIONS+=" --srpm-dependency-file $SRPM_RPM_DIRECT_REQUIRES_FILE"
-fi
-if [ -f "$RPM_DIRECT_REQUIRES_FILE" ]; then
-    CMD_OPTIONS+=" --rpm-dependency-file $RPM_DIRECT_REQUIRES_FILE"
-fi
-if [ -f "$RPM_TO_SRPM_MAP_FILE" ]; then
-    CMD_OPTIONS+=" --rpm-to-srpm-map-file $RPM_TO_SRPM_MAP_FILE"
-fi
-
-
-for s in $SRPMS_LIST; do
-    d=$(echo "$s" | sed 's#/SRPMS/#/SOURCES/#')
-    if [ -f $d/BIG ]; then
-        BUILD_SIZE=$(cat $d/BIG | { read first rest ; echo $first ; })
-        CMD_OPTIONS="$CMD_OPTIONS --mark-big-path $BUILD_SIZE:$s"
-    fi
-    if [ -f $d/SLOW ]; then
-        BUILD_SPEED=$(cat $d/SLOW | { read first rest ; echo $first ; })
-        CMD_OPTIONS="$CMD_OPTIONS --mark-slow-path $BUILD_SPEED:$s"
-    fi
-done
-echo "CMD_OPTIONS=$CMD_OPTIONS"
-
-echo "MAX_WORKERS=$MAX_WORKERS"
-echo "MOCKCHAIN_RESOURCE_ALLOCATION=$MOCKCHAIN_RESOURCE_ALLOCATION"
-
-
-CMD="$CMD_PREFIX $MOCKCHAIN --root $BUILD_CFG --localrepo $BUILD_BASE --recurse --workers=$MAX_WORKERS --worker-resources=$MOCKCHAIN_RESOURCE_ALLOCATION --basedir=$MY_WORKSPACE --tmp_prefix=$USER --addrepo=$LOCAL_URL --addrepo=$LOCAL_SRC_URL $CMD_OPTIONS $MOCK_PASSTHROUGH --rebuild"
-CMD_BUILD_LIST="$CHAIN_OPTION $SRPMS_LIST"
-echo ""
-echo "$CMD $MOCK_PASSTHROUGH --define='_tis_dist .tis' $MOCK_PASSTHROUGH --define='platform_release $PLATFORM_RELEASE' $CMD_BUILD_LIST"
-echo ""
-trapwrap stdbuf -o0 $CMD $MOCK_PASSTHROUGH --define="_tis_dist .tis" $MOCK_PASSTHROUGH --define="platform_release $PLATFORM_RELEASE" $CMD_BUILD_LIST
-MOCKCHAIN_RC=$?
-
-echo $PLATFORM_RELEASE > $LAST_PLATFORM_RELEASE_FILE
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-    umount_mock_root_as_tmpfs_all
-fi
-
-for d in $(find $RESULT_DIR -name '*.rpm' | grep -v '[.]src[.]rpm' | xargs --max-args=1 dirname | sort -u); do
-    rsync -u $d/*.rpm $RPM_DIR
-done
-
-if [ $ALL -eq 1 ]; then
-    echo
-    echo "Auditing for obsolete srpms"
-    for r in $(find $RESULT_DIR $RPM_DIR -name '*.src.rpm'); do
-        (
-        f=$(basename $r)
-        if [ ! -f "$SRPM_OUT/$f" ]; then
-            \rm -fv $r
-        fi
-        ) &
-    done
-    echo "waiting for srpm audit to complete"
-    wait
-    echo "Auditing for obsolete rpms"
-    for r in $(find $RESULT_DIR $RPM_DIR -name '*.rpm' | grep -v 'src.rpm'); do
-        (
-        s=$(rpm_get_srpm $r)
-        if [ ! -f "$SRPM_OUT/$s" ]; then
-            echo "Failed to find '$SRPM_OUT/$s'"
-            \rm -fv $r
-        fi
-        ) &
-    done
-    echo "waiting for rpm audit to complete"
-    wait
-    echo "Audit complete"
-    echo ""
-fi
-
-if [ $MOCKCHAIN_RC -ne 0 ]; then
-   echo "ERROR: Failed to build rpms using '$CMD'"
-   exit 1
-fi
-
-echo "Recreate repodata"
-for d in $(find -L $MY_WORKSPACE/rpmbuild $MY_WORKSPACE/results  -type d -name repodata); do
-   update_repodata $(dirname "$d")
-   create_lst $(dirname "$d")
-done
-
-
-if [ -f $MOCKCHAIN_LOG ]; then
-    grep 'following pkgs could not be successfully built' $MOCKCHAIN_LOG >> /dev/null
-    if [ $? -eq 0 ]; then
-        FAILED_PKGS=""
-        for p in $(sed -n '/following pkgs could not be successfully built:/,/Results out to/p' $MOCKCHAIN_LOG | grep -v '*** Build Failed ***'  | sed 1d | sed '$ d' | cut -d ':' -f2-); do
-            PKG=$(basename $p)
-            FAILED_PKGS="$PKG  $FAILED_PKGS"
-        done
-        echo
-        echo "Failed to build packages:  $FAILED_PKGS"
-        exit 1
-    fi
-fi
-
-# If we're doing a nightly or formal build (i.e. not a developer build) then we
-# want to sign certain packages.  Note that only certain users (i.e. jenkins)
-# have the authority to requiest that packages be signed.
-#
-# Signing is not actually done on this server (the keys are kept safe on a
-# different server with very limited access) but we can invoke a script to
-# make calls to the signing server.  Note that this will NOT work if you are
-# not Jenkins and don't have access to the Jenkins cross server login keys.
-#
-# Note that both std and rt builds must be complete before invoking the signing
-# script
-if [ 0$FORMAL_BUILD -eq 1 ] && [ "$USER" == "jenkins" ]; then
-	if [ -e $MY_WORKSPACE_TOP/std ] && [ -e $MY_WORKSPACE_TOP/rt ]; then
-		# Create dir for log, if it doesn't exit
-		mkdir -p $MY_WORKSPACE_TOP/export
-		echo "We are jenkins, and we are trying to do a formal build -- calling signing server"
-		echo "  to sign boot RPMs with secure boot keys"
-
-		MY_WORKSPACE=$MY_WORKSPACE_TOP ${SIGN_SECURE_BOOT} > $MY_WORKSPACE_TOP/export/${SIGN_SECURE_BOOT_LOG} 2>&1
-		if [ $? -ne 0 ]; then
-			echo "Signing of packages failed -- see $MY_WORKSPACE_TOP/export/${SIGN_SECURE_BOOT_LOG}"
-			exit 1
-		fi
-	fi
-fi
-
-exit 0
-) 2>&1 | stdbuf -o0 awk '{ print strftime("%H:%M:%S"), $0; fflush(); }' | tee $(date "+$MY_WORKSPACE/build-rpms-parallel_%Y-%m-%d_%H-%M-%S.log") ; exit ${PIPESTATUS[0]}
diff --git a/build-tools/build-rpms-serial b/build-tools/build-rpms-serial
deleted file mode 100755
index 2a95b1f9..00000000
--- a/build-tools/build-rpms-serial
+++ /dev/null
@@ -1,2220 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Builds rpm files from src.rpm files.
-#
-# This version compiles one package at a time.
-#
-# The location of packages to be built is
-# $MY_WORKSPACE/<build-type>/rpmbuild/SRPMS.
-#
-# The build order is a derived from the BuildRequires in the
-# spec files in the src.rpms.  Note that the BuildRequires sometimes
-# create dependency loops, so no correct order can be computed.  In these
-# cases we add a retry loop.  As long as one new package builds, we
-# keep retrying the loop, until all are built, or no progress is made.
-# So please don't panic and CTRL-C just because you see a few error
-# messages go by!
-#
-
-export ME=$(basename "$0")
-CMDLINE="$ME $@"
-BUILD_RPMS_PARALLEL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-# Set PKG_MANAGER for our build environment.
-source "${BUILD_RPMS_PARALLEL_DIR}/pkg-manager-utils.sh"
-
-
-# Build for distribution.  Currently 'centos' is only supported value.
-export DISTRO="centos"
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-   CREATEREPO="createrepo"
-fi
-
-# Old repo path or new?
-LOCAL_REPO=${MY_REPO}/local-repo
-if [ ! -d ${LOCAL_REPO} ]; then
-    LOCAL_REPO=${MY_REPO}/cgcs-tis-repo
-    if [ ! -d ${LOCAL_REPO} ]; then
-        # This one isn't fatal, LOCAL_REPO is not required
-        LOCAL_REPO=${MY_REPO}/local-repo
-    fi
-fi
-
-# Make sure we have a dependency cache
-DEPENDANCY_DIR="${LOCAL_REPO}/dependancy-cache"
-SRPM_DIRECT_REQUIRES_FILE="$DEPENDANCY_DIR/SRPM-direct-requires"
-SRPM_TRANSITIVE_REQUIRES_FILE="$DEPENDANCY_DIR/SRPM-transitive-requires"
-SRPM_TRANSITIVE_DESCENDANTS_FILE="$DEPENDANCY_DIR/SRPM-transitive-descendants"
-SRPM_DIRECT_DESCENDANTS_FILE="$DEPENDANCY_DIR/SRPM-direct-descendants"
-SRPM_RPM_DIRECT_REQUIRES_FILE="$DEPENDANCY_DIR/SRPM-direct-requires-rpm"
-RPM_DIRECT_REQUIRES_FILE="$DEPENDANCY_DIR/RPM-direct-requires"
-RPM_TO_SRPM_MAP_FILE="$DEPENDANCY_DIR/rpm-to-srpm"
-SRPM_TO_RPM_MAP_FILE="$DEPENDANCY_DIR/srpm-to-rpm"
-
-UNBUILT_PATTERN_FILE="$MY_REPO/build-data/unbuilt_rpm_patterns"
-
-SIGN_SECURE_BOOT="sign-secure-boot"
-SIGN_SECURE_BOOT_LOG="sign-secure-boot.log"
-
-export MOCK=/usr/bin/mock
-
-BUILD_RPMS_SERIAL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-source "${BUILD_RPMS_SERIAL_DIR}/image-utils.sh"
-source "${BUILD_RPMS_SERIAL_DIR}/wheel-utils.sh"
-source "${BUILD_RPMS_SERIAL_DIR}/spec-utils"
-source "${BUILD_RPMS_SERIAL_DIR}/srpm-utils"
-
-HOME=$(pwd)
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   $ME [ [--rt] [--no-required] [--no-descendants] [--no-build-info] [--no-autoclean] [--formal] <optional list of package names> ]"
-    echo "   $ME --dep-test <package name>"
-    echo "   $ME --clean [ [--no-descendants] <optional list of package names> ]"
-    echo "   $ME --help"
-    echo ""
-}
-
-number_of_cpus () {
-    /usr/bin/nproc
-}
-
-join_by () { local IFS="$1"; shift; echo "$*"; }
-
-create-no-clean-list () {
-   local MY_YUM_CONF=$(create-yum-conf)
-   local NO_CLEAN_LIST_FILE=$MY_WORKSPACE/no_clean_list.txt
-   local NEED_REBUILD=0
-
-   if [ ! -f $NO_CLEAN_LIST_FILE ]; then
-       NEED_REBUILD=1
-   else
-       if [ -f $MY_BUILD_CFG ]; then
-           find "$MY_BUILD_CFG" -not -newermm "$NO_CLEAN_LIST_FILE" | grep -q $(basename $MY_BUILD_CFG)
-           if [ $? -eq 0 ]; then
-               NEED_REBUILD=1
-           fi
-       fi
-   fi
-
-   if [ $NEED_REBUILD -eq 1 ]; then
-       local install_groups=""
-       local install_packages=""
-       local p
-
-       for p in $(grep "config_opts\['chroot_setup_cmd'\]" $MY_BUILD_CFG | tail -n1 | cut -d '=' -f 2 | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//' -e "s/^'//" -e "s/'$//" -e 's/^install //'); do
-          if [[ $p == @* ]] ; then
-              install_groups=$(join_by ' ' $install_groups $(echo $p | cut -c 2-))
-          else
-              install_packages=$(join_by ' ' $install_packages $p)
-          fi
-       done
-
-       local noclean_last_list_len=0
-       local noclean_list=""
-       local tmp_list=""
-       local g
-
-       for g in $install_groups; do
-           # Find mandatory packages in the group.
-           # Discard anything before (and including) 'Mandatory Packages:'
-           # and anything after (and including) 'Optional Packages:'.
-           # Also discard leading spaces or '+' characters.
-           tmp_list=$(${PKG_MANAGER} -c $MY_YUM_CONF groupinfo $g 2>> /dev/null \
-                        | awk 'f;/Mandatory Packages:/{f=1}' \
-                        | sed -n '/Optional Packages:/q;p' \
-                        | sed 's#[ +]*##')
-           noclean_list=$(join_by ' ' $noclean_list $tmp_list)
-       done
-
-       noclean_list=$(join_by ' ' $noclean_list $install_packages)
-       noclean_list=$(echo $noclean_list | tr ' ' '\n' | sort --uniq)
-       noclean_list_len=$(echo $noclean_list | wc -w)
-
-       while [ $noclean_list_len -gt $noclean_last_list_len ]; do
-           noclean_last_list_len=$noclean_list_len
-           noclean_list=$( (${PKG_MANAGER} -c $MY_YUM_CONF deplist $noclean_list 2>> /dev/null | grep provider: | awk '{ print $2 }' | awk -F . '{ print $1 }'; for p in $noclean_list; do echo $p; done) | sort --uniq)
-           noclean_list_len=$(echo $noclean_list | wc -w)
-       done
-
-       echo $noclean_list > $NO_CLEAN_LIST_FILE
-   fi
-
-   cat $NO_CLEAN_LIST_FILE
-}
-
-str_lst_contains() {
-   TARGET="$1"
-   LST="$2"
-   if [[ $LST =~ (^|[[:space:]])$TARGET($|[[:space:]]) ]] ; then
-      return 0
-   else
-      return 1
-   fi
-}
-
-
-#
-# Create a list of rpms in the directory
-#
-create_lst () {
-   local DIR=${1}
-
-       (cd $DIR
-        [ -f rpm.lst ] && \rm -rf rpm.lst
-        [ -f srpm.lst ] && \rm -rf srpm.lst
-        find . -name '*.rpm' -and -not -name '*.src.rpm' | sed 's#^[.][/]##' | sort > rpm.lst
-        find . -name '*.src.rpm' | sed 's#^[.][/]##' | sort > srpm.lst
-       )
-}
-
-#
-# Delete old repodata and reate a new one
-#
-recreate_repodata () {
-   local DIR=${1}
-
-       (
-        mkdir -p $DIR
-        cd $DIR
-        if [ -f repodata/*comps*xml ]; then
-           \mv repodata/*comps*xml comps.xml
-        fi
-        \rm -rf repodata
-        \rm -rf .repodata
-        if [ -f comps.xml ]; then
-           $CREATEREPO -g comps.xml --workers $(number_of_cpus) $(pwd)
-        else
-           $CREATEREPO --workers $(number_of_cpus) $(pwd)
-        fi
-       )
-}
-
-#
-# Update existing repodata
-#
-update_repodata () {
-   local DIR=${1}
-
-       (cd $DIR
-        TMP=$(mktemp /tmp/update_repodata_XXXXXX)
-        RC=0
-        if [ -f comps.xml ]; then
-           $CREATEREPO --update -g comps.xml --workers $(number_of_cpus) $(pwd) &> $TMP
-           RC=$?
-        else
-           $CREATEREPO --update --workers $(number_of_cpus) $(pwd) &> $TMP
-           RC=$?
-        fi
-        if [ $RC -ne 0 ]; then
-           cat $TMP
-        fi
-        \rm -f $TMP
-       )
-}
-
-#
-# return array that is the intersection of two other arrays
-#
-# NEW_ARRAY=( $( intersection ARRAY1 ARRAY2 ) )
-#
-intersection () {
-   local Aname=$1[@]
-   local Bname=$2[@]
-   local A=("${!Aname}")
-   local B=("${!Bname}")
-
-   # echo "${A[@]}"
-   # echo "${B[@]}"
-   for a in "${A[@]}"; do
-      # echo "a=$a"
-      for b in "${B[@]}"; do
-         # echo "b=$b"
-         if [ "$a" == "$b" ]; then
-            echo "$a"
-            break
-         fi
-      done
-   done
-}
-
-#
-# return array that is the union of two other arrays
-#
-# NEW_ARRAY=( $( union ARRAY1 ARRAY2 ) )
-#
-union () {
-   local Aname=$1[@]
-   local Bname=$2[@]
-   local A=("${!Aname}")
-   local B=("${!Bname}")
-   local a
-   local b
-
-   for a in "${A[@]}"; do
-      echo "$a"
-   done
-
-   for b in "${B[@]}"; do
-      local found=0
-      for a in "${A[@]}"; do
-         if [ "$a" == "$b" ]; then
-            found=1
-            break
-         fi
-      done
-      if [ $found -eq 0 ]; then
-         echo $b
-      fi
-   done
-}
-
-#
-# returns 0 if element is in the array
-#
-#  e.g.  contains ARRAY $SEEKING  && echo "$SEEKING is in 'ARRAY'"
-#
-contains () {
-   local Aname=$1[@]
-   local A=("${!Aname}")
-   local seeking=$2
-   local in=1
-
-    for a in "${A[@]}"; do
-        if [[ $a == $seeking ]]; then
-            in=0
-            break
-        fi
-    done
-    return $in
-}
-
-#
-# Append element to array if not present
-#
-# ARRAY=( $( put ARRAY $ELEMENT ) )
-#
-put () {
-   local Aname=$1[@]
-   local A=("${!Aname}")
-   local element="$2"
-   for a in "${A[@]}"; do
-      echo "$a"
-   done
-   contains A "$element" || echo "$element"
-}
-
-build_order_recursive () {
-   local target=$1
-   local idx
-   local remainder_list
-   local needs
-   local needs_list
-
-   for((idx=0;idx<${#UNORDERED_LIST[@]};idx++)); do
-      if [ ${UNORDERED_LIST[idx]} == $target ]; then
-         remainder_list=( ${UNORDERED_LIST[@]:0:$idx} ${UNORDERED_LIST[@]:$((idx + 1))} )
-         UNORDERED_LIST=( ${remainder_list[@]} )
-         needs=( $(grep "^$target;" "$SRPM_DIRECT_REQUIRES_FILE" | sed "s/$target;//" | sed 's/,/ /g') )
-         needs_list=( $(intersection needs remainder_list) )
-         for((idx=0;idx<${#needs_list[@]};idx++)); do
-             build_order_recursive ${needs_list[idx]}
-         done
-         echo $target
-         break
-      fi
-   done
-}
-
-build_order () {
-   local Aname=$1[@]
-   local original_list=("${!Aname}")
-   local needs
-   local needs_list
-   local remainder_list
-   local idx
-   local element
-   local next_start=0
-   local old_next_start=0
-   local progress=1
-
-   while [ ${#original_list[@]} -gt 0 ] && [ $progress -gt 0 ]; do
-      progress=0
-      old_next_start=$next_start
-      for((idx=$next_start;idx<${#original_list[@]};idx++)); do
-         element=${original_list[idx]}
-         next_start=$idx
-         remainder_list=( ${original_list[@]:0:$idx} ${original_list[@]:$((idx + 1))} )
-         needs=( $(grep "^$element;" "$SRPM_DIRECT_REQUIRES_FILE" | sed "s/$element;//" | sed 's/,/ /g') )
-         needs_list=( $(intersection needs remainder_list) )
-         if [ ${#needs_list[@]} -eq 0 ]; then
-            echo "$element"
-            original_list=( "${remainder_list[@]}" )
-            if [ $next_start -ge ${#original_list[@]} ]; then
-               next_start=0
-            fi
-            progress=1
-            break
-         fi
-      done
-      if [ $old_next_start -ne 0 ]; then
-         progress=1
-         next_start=0
-      fi
-   done
-
-   if [ ${#original_list[@]} -gt 0 ]; then
-      # Had trouble calculating a build order for these remaining packages, so stick them at the end
-      UNORDERED_LIST=( ${original_list[@]} )
-      while [ ${#UNORDERED_LIST[@]} -gt 0 ]; do
-         element=${UNORDERED_LIST[0]}
-         build_order_recursive $element
-      done
-   fi
-}
-
-set_mock_symlinks () {
-   local LNK
-   local DEST
-   local CFG=$1
-   if [ -d /localdisk/loadbuild/mock ]; then
-      mkdir -p $MY_WORKSPACE
-      LNK=$(echo "/localdisk/loadbuild/mock/$(basename $CFG)" | sed 's/.cfg$//')
-      if [ ! -L $LNK ] && [ -d $LNK ]; then
-         echo "WARNING: Found directory at '$LNK' when symlink was expected. Fixing..."
-         \rm -rf $LNK
-         if [ -d $LNK ]; then
-            \mv $LNK $LNK.clean_me
-         fi
-      fi
-      if [ -L $LNK ]; then
-         DEST=$(readlink $LNK)
-         if [ "$DEST" != "$MY_WORKSPACE" ] || [ ! -d "$MY_WORKSPACE" ]; then
-            echo "WARNING: Found broken symlink at '$LNK'. Fixing..."
-            \rm -f $LNK
-         fi
-      fi
-      if [ ! -L $LNK ]; then
-         if [ ! -d "$MY_WORKSPACE" ]; then
-            echo "ERROR: Can't create symlink from $LNK to $MY_WORKSPACE as destination does not exist."
-            exit 1
-         fi
-         ln -s $MY_WORKSPACE $LNK
-      fi
-   fi
-
-   if [ -d /localdisk/loadbuild/mock-cache ]; then
-      mkdir -p $MY_WORKSPACE/cache
-      LNK=$(echo "/localdisk/loadbuild/mock-cache/$(basename $CFG)" | sed 's/.cfg$//')
-      if [ ! -L $LNK ] && [ -d $LNK ]; then
-         echo "WARNING: Found directory at '$LNK' when symlink was expected. Fixing..."
-         \rm -rf $LNK
-         if [ -d $LNK ]; then
-            \mv $LNK $LNK.clean_me
-         fi
-      fi
-      if [ -L $LNK ]; then
-         DEST=$(readlink $LNK)
-         if [ "$DEST" != "$MY_WORKSPACE/cache" ] || [ ! -d "$MY_WORKSPACE/cache" ]; then
-            echo "WARNING: Found broken symlink at '$LNK'. Fixing..."
-            \rm -f $LNK
-         fi
-      fi
-      if [ ! -L $LNK ]; then
-         if [ ! -d "$MY_WORKSPACE/cache" ]; then
-            echo "ERROR: Can't create symlink from $LNK to $MY_WORKSPACE/cache as destination does not exist."
-            exit 1
-         fi
-         ln -s $MY_WORKSPACE/cache $LNK
-      fi
-   fi
-}
-
-remove_mock_symlinks () {
-   local LNK
-   local CFG=$1
-   if [ -d /localdisk/loadbuild/mock ]; then
-      LNK=$(echo "/localdisk/loadbuild/mock/$(basename $CFG)" | sed 's/.cfg$//')
-      if [ -L $LNK ]; then
-         \rm -f $LNK
-      fi
-      if [ -d $LNK ]; then
-         \rm -rf $LNK
-         if [ $? -ne 0 ]; then
-            \mv -f $LNK $LNK.clean_me
-         fi
-      fi
-   fi
-
-   if [ -d /localdisk/loadbuild/mock-cache ]; then
-      LNK=$(echo "/localdisk/loadbuild/mock-cache/$(basename $MY_BUILD_CFG)" | sed 's/.cfg$//')
-      if [ -L $LNK ]; then
-         \rm -f $MY_WORKSPACE/cache $LNK
-      fi
-      if [ -d $LNK ]; then
-         \rm -rf $LNK
-         if [ $? -ne 0 ]; then
-            \mv -f $LNK $LNK.clean_me
-         fi
-      fi
-   fi
-}
-
-kill_descendents ()
-{
-    local kill_pid=$1
-    local kill_all=$2
-    local need_stop=$3
-    local iteration=$4
-    local ret=0
-    local rc=0
-
-    # echo "kill_descendents pid=$kill_pid, all=$kill_all stop=$need_stop, iteration=$iteration"
-
-    local relevant_recursive_children="$ME"
-    local relevant_recursive_promote_children="mock"
-    local relevant_other_children="mockchain-parallel mockchain-parallel-1.3.4 mockchain-parallel-1.4.16 mockchain-parallel-2.6 mockchain-parallel-2.7"
-
-    local recursive_promote_children=$(for relevant_child in $relevant_recursive_promote_children; do pgrep -P $kill_pid $relevant_child; done)
-    local recursive_children=$(for relevant_child in $relevant_recursive_children; do pgrep -P $kill_pid $relevant_child; done)
-    local other_children=""
-
-    if [ $kill_all -eq 1 ]; then
-        recursive_promote_children=""
-        recursive_children=$(pgrep -P $kill_pid)
-    fi
-
-    if [ $iteration -eq 0 ]; then
-        other_children=$(for relevant_child in $relevant_other_children; do pgrep -P $kill_pid $relevant_child; done)
-        if [ "$other_children" != "" ]; then
-            ret=1
-        fi
-    fi
-
-    if [ $need_stop -eq 1 ]; then
-        for pid in $recursive_children $recursive_promote_children; do
-            kill -SIGSTOP $pid &> /dev/null
-        done
-    fi
-
-    for pid in $recursive_children; do
-        kill_descendents "$pid" $kill_all $need_stop $((iteration + 1))
-    done
-    for pid in $recursive_promote_children; do
-        kill_descendents "$pid" 1 1 $((iteration + 1))
-    done
-
-    # echo "kill: $recursive_children $recursive_promote_children"
-    for pid in $recursive_children $recursive_promote_children; do
-        kill $pid &> /dev/null
-        rc=$?
-        if [ $need_stop -eq 1 ]; then
-            kill -SIGCONT $pid &> /dev/null
-        fi
-        if [ $rc -eq 0 ] && [ $iteration -eq 0 ]; then
-            wait $pid
-        fi
-    done
-
-    # echo "kill: $other_children"
-    for pid in $other_children; do
-        kill $pid &> /dev/null
-        rc=$?
-        if [ $rc -eq 0 ] && [ $iteration -eq 0 ]; then
-            wait $pid
-        fi
-    done
-
-    return $ret
-}
-
-function my_exit_n() {
-    local need_mock_cleanup
-    # echo "$BASHPID: $ME: my_exit: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    # echo "$BASHPID: $ME: my_exit: waiting"
-    wait
-    # echo "$BASHPID: $ME: my_exit: wait complete"
-    # echo "$BASHPID: $ME: my_exit: need_mock_cleanup=$need_mock_cleanup"
-}
-
-function my_exit() {
-    local need_mock_cleanup
-    # echo "$BASHPID: $ME: my_exit: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    # echo "$BASHPID: $ME: my_exit: waiting"
-    wait
-    # echo "$BASHPID: $ME: my_exit: wait complete"
-    # echo "$BASHPID: $ME: my_exit: need_mock_cleanup=$need_mock_cleanup"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        sleep 1
-    fi
-}
-
-function my_sigint_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sigint_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sigint_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigint_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigint_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sighup_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sighup_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sighup_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sighup_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sighup_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sigabrt_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sigabrt_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sigabrt_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigabrt_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigabrt_n: wait complete"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sigterm_n() {
-    local ARG=$1
-    echo "$BASHPID: $ME: my_sigterm_n: ARG=$ARG"
-    echo "$BASHPID: $ME: my_sigterm_n: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigterm_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigterm_n: wait complete"
-    echo "$BASHPID: $ME: my_sigterm_n: need_mock_cleanup=$need_mock_cleanup"
-    if [ $need_mock_cleanup -ne 0 ]; then
-        umount_mock_root_as_tmpfs_cfg $ARG
-    fi
-    exit 1
-}
-
-function my_sigint() {
-    echo "$BASHPID: $ME: my_sigint: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigterm_n: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigterm_n: wait complete"
-    exit 1
-}
-
-function my_sighup() {
-    echo "$BASHPID: $ME: my_sighup: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sighup: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sighup: wait complete"
-    exit 1
-}
-
-function my_sigabrt() {
-    echo "$BASHPID: $ME: my_sigabrt: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigabrt: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigabrt: wait complete"
-    exit 1
-}
-
-function my_sigterm() {
-    echo "$BASHPID: $ME: my_sigterm: killing children"
-    local need_mock_cleanup
-    kill_descendents  $BASHPID 0 0 0
-    need_mock_cleanup=$?
-    echo "$BASHPID: $ME: my_sigterm: waiting"
-    wait
-    echo "$BASHPID: $ME: my_sigterm: wait complete"
-    echo "$BASHPID: $ME: my_sigterm: need_mock_cleanup=$need_mock_cleanup"
-    exit 1
-}
-
-trapwrap() {
-    local WCMD=$1
-    shift
-    declare -i pid status=255
-    # set the trap for the foreground process
-    trap my_sigint INT
-    trap my_sighup HUP
-    trap my_sigabrt ABRT
-    trap my_sigterm TERM
-    # run the command in background
-    ### "$@" & pid=$!
-    WARGS=()
-    x=0
-    for i in "$@"; do
-        WARGS[$x]="$i"
-        x=$((x+1))
-    done
-    echo "$WCMD ${WARGS[@]/#/}"
-    $WCMD "${WARGS[@]/#/}" & pid=$!
-    # wait until bg command finishes, handling interruptions by trapped signals
-    while (( status > 128 )); do
-        wait $pid
-        status=$?
-    done
-    # restore the trap
-    trap - INT
-    trap - HUP
-    trap - ABRT
-    trap - TERM
-    # return the command exit status
-    return $status
-}
-
-trapwrap_n() {
-    local ARG=$1
-    shift
-    local WCMD=$1
-    shift
-    declare -i pid status=255
-    # set the trap for the foreground process
-    trap my_exit_n EXIT
-    trap "my_sigint_n $ARG" INT
-    trap "my_sighup_n $ARG" HUP
-    trap "my_sigabrt_n $ARG" ABRT
-    trap "my_sigterm_n $ARG" TERM
-    # run the command in background
-    WARGS=()
-    x=0
-    for i in "$@"; do
-        WARGS[$x]="$i"
-        x=$((x+1))
-    done
-    echo "$WCMD ${WARGS[@]/#/}"
-    $WCMD "${WARGS[@]/#/}" & pid=$!
-    # wait until bg command finishes, handling interruptions by trapped signals
-    while (( status > 128 )); do
-        wait $pid
-        status=$?
-    done
-    # restore the trap
-    trap - INT
-    trap - HUP
-    trap - ABRT
-    trap - TERM
-    # return the command exit status
-    return $status
-}
-
-trap my_exit EXIT
-
-mock_get_cache_dir () {
-      local CFG=$1
-      local CACHE_DIR="$MY_WORKSPACE/cache"
-      local CACHE_LINE=$(grep "config_opts[[][']cache_topdir['][]]" $CFG)
-      if [ $? -eq 0 ]; then
-         CACHE_DIR=$(echo "$CACHE_LINE" | awk -F \' '{ print $4 }')
-      fi
-      echo "$CACHE_DIR"
-}
-
-mock_get_root_dir () {
-      local CFG=$1
-      local ROOT_DIR="$MY_WORKSPACE/mock"
-      local ROOT_LINE=$(grep "config_opts[[][']root['][]]" $CFG)
-      if [ $? -eq 0 ]; then
-         ROOT_DIR="$MY_WORKSPACE/"$(echo "$ROOT_LINE" | awk -F \' '{ print $4 }')
-      fi
-      echo "$ROOT_DIR"
-}
-
-mock_clean_cfg () {
-      local CFG=$1
-      echo "${FUNCNAME[0]}: $CFG"
-      echo "=================================="
-      mock_clean_cache_cfg $CFG
-      echo "=================================="
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all
-      echo "=================================="
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --clean"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --clean
-      ### Note:  this sometimes leaves behind a $MY_WORKSPACE/cache/mock/yum_cache/yumcache.lock
-      echo "=================================="
-      mock_clean_cache_all_cfg $CFG
-      echo "=================================="
-}
-mock_clean () {
-   echo "${FUNCNAME[0]}: in"
-   echo "=================================="
-   remove_mock_symlinks $MY_BUILD_CFG
-   set_mock_symlinks $MY_BUILD_CFG
-   echo "=================================="
-   mock_clean_cfg $BUILD_CFG
-   echo "=================================="
-   remove_mock_symlinks $MY_BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_partial_clean_cfg () {
-   local CFG=$1
-   local SRPMS_LIST="$2"
-   local RPMS_LIST="$3"
-   local CMD
-   local TMP
-   local RC
-
-   echo "${FUNCNAME[0]}: CFG=$CFG  SRPMS_LIST='$SRPMS_LIST'  RPMS_LIST='$RPMS_LIST'"
-
-   TMP=$(mktemp /tmp/mock_partial_clean_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-
-   if [ -d $ROOT_DIR/root/builddir/build/SOURCES ]; then
-      echo "rm -rf $ROOT_DIR/root/builddir/build/SOURCES/*"
-      \rm -rf $ROOT_DIR/root/builddir/build/SOURCES/* 2>> /dev/null
-   fi
-
-   if [ -d $ROOT_DIR/root/builddir/build/SPECS ]; then
-      echo "rm -rf $ROOT_DIR/root/builddir/build/SPECS/*"
-      \rm -rf $ROOT_DIR/root/builddir/build/SPECS/* 2>> /dev/null
-   fi
-
-   for s in $SRPMS_LIST; do
-      f=$(basename $s)
-      if [ -f $ROOT_DIR/root/builddir/build/SRPMS/$f ]; then
-         \rm -f -v $ROOT_DIR/root/builddir/build/SRPMS/$f 2>> /dev/null
-      fi
-      if [ -f $ROOT_DIR/root/builddir/build/originals/$f ]; then
-         \rm -f -v $ROOT_DIR/root/builddir/build/originals/$f 2>> /dev/null
-      fi
-   done
-
-   for r in $RPMS_LIST; do
-      for d in $(find $ROOT_DIR/root/builddir/build/BUILD/ -maxdepth 1 -name '$r*' 2>> /dev/null); do
-         echo "rm -rf $d"
-         \rm -rf $d 2>> /dev/null
-      done
-      if [ -d $ROOT_DIR/root/builddir/build/RPMS ]; then
-         for f in $(find $ROOT_DIR/root/builddir/build/RPMS -maxdepth 1 -name "$r*rpm" 2>> /dev/null); do
-            \rm -f -v $f 2>> /dev/null
-         done
-      fi
-   done
-
-
-   local NO_CLEAN_LIST=$(create-no-clean-list)
-   echo "NO_CLEAN_LIST=$NO_CLEAN_LIST"
-
-   local RPMS_CLEAN_LIST=""
-   local NEED_FULL_MOCK_CLEAN=0
-   for r in $RPMS_LIST; do
-       if ! str_lst_contains $r "$NO_CLEAN_LIST" ; then
-           RPMS_CLEAN_LIST=$(join_by ' ' $RPMS_CLEAN_LIST $r)
-       else
-           echo "Can't remove '$r' from mock environment without a wipe";
-           NEED_FULL_MOCK_CLEAN=1
-       fi
-   done
-
-   if [ $NEED_FULL_MOCK_CLEAN -eq 1 ]; then
-       echo "Wipe the mock environment"
-       mock_clean_cfg $CFG
-       RC=$?
-   else
-       # Intent of following is for $RPMS_LIST to be expand now while the remaining $ varaibles are for bash inside mock to expand
-       echo "Try to uninstall from the mock environment these packages: $RPMS_CLEAN_LIST"
-       CMD='LST="'$RPMS_CLEAN_LIST'";
-            DELETE_LIST="";
-            for r in $LST; do
-                  FOUND=$(rpm  -q $r) ;
-                  if [ $? -eq 0 ]; then
-                     DELETE_LIST="$DELETE_LIST $FOUND";
-                  fi;
-            done;
-            echo "uninstalling these packages: $DELETE_LIST";
-            if [ "$DELETE_LIST" != "" ]; then
-                rpm  -e --nodeps $DELETE_LIST;
-            fi'
-       echo "$MOCK --root $CFG --configdir $(dirname $CFG) --chroot bash -c $CMD" &> $TMP
-       trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --chroot "bash -c '$CMD'" &>> $TMP
-       RC=$?
-       if [ $RC -ne 0 ]; then
-           cat $TMP
-           \rm -f $TMP
-           return $RC
-       fi
-
-       mock_clean_cache_cfg $CFG
-       RC=$?
-       \rm -f $TMP
-   fi
-
-   return $RC
-}
-
-mock_partial_clean () {
-   local SRPMS_LIST="$1"
-   local RPMS_LIST="$2"
-   echo "${FUNCNAME[0]}: in"
-   echo "${FUNCNAME[0]}: '$SRPMS_LIST'  '$RPMS_LIST'"
-   echo "=================================="
-   local NO_CLEAN_LIST=$(create-no-clean-list)
-   echo "=================================="
-   mock_partial_clean_cfg $BUILD_CFG "$SRPMS_LIST" "$RPMS_LIST"
-   echo "=================================="
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_clean_cache_cfg () {
-   local CFG=$1
-   local TMP
-   local RC
-
-   echo "${FUNCNAME[0]}: $CFG  '$SRPMS_LIST'  '$RPMS_LIST'"
-
-   TMP=$(mktemp /tmp/mock_clean_cache_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-
-   echo "${FUNCNAME[0]}: $CFG"
-
-   clean_yum_cache_cfg $CFG
-
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --scrub=root-cache --scrub=yum-cache --scrub=cache" &> $TMP
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --scrub=root-cache --scrub=yum-cache --scrub=cache &>> $TMP
-   RC=$?
-   if [ $RC -ne 0 ]; then
-      cat $TMP
-   fi
-
-   \rm -f $TMP
-   return $RC
-}
-
-mock_clean_cache () {
-   echo "${FUNCNAME[0]}: in"
-   mock_clean_cache_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_clean_cache_all_cfg () {
-   local CFG=$1
-
-   echo "${FUNCNAME[0]}: $CFG"
-   echo "=================================="
-   clean_yum_cache_cfg $CFG
-   echo "=================================="
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all"
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --scrub=all
-   echo "=================================="
-}
-
-mock_clean_cache_all () {
-   echo "${FUNCNAME[0]}: in"
-   mock_clean_cache_all_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_clean_metadata_cfg () {
-   local CFG=$1
-   local TMP
-   local RC
-
-   echo "${FUNCNAME[0]}: $CFG"
-
-   TMP=$(mktemp /tmp/mock_partial_clean_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-
-   #
-   # From mock config, extract the embedded yum/dnf config.
-   # Then extract the repo definitions,
-   # and convert to a series of yum commands to clean the 
-   # metadata one repo at a time.   e.g.
-   # CMD="yum --disablerepo=* --enablerepo=StxCentos7Distro clean metadata; \
-   #      yum --disablerepo=* --enablerepo=StxCentos7Distro-rt clean metadata;
-   #      ...
-   #     "
-   #
-   CMD=$((grep -e config_opts\\[\'yum.conf\'\\\] $CFG \
-               -e config_opts\\[\'dnf.conf\'\\\] $CFG | \
-          sed 's#\\n#\n#g') | \
-         grep '^[[]' | \
-         grep -v main | \
-         sed -e 's/[][]//g' -e "s#^#${PKG_MANAGER} --disablerepo=* --enablerepo=#" -e 's#$# clean metadata#' | \
-         sort -u | \
-         tr '\n' ';')
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --chroot bash -c $CMD" &> $TMP
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --chroot "bash -c '($CMD)'" &>>$TMP
-   RC=$?
-   if [ $RC -ne 0 ]; then
-      cat $TMP
-   fi
-   \rm -f $TMP
-   return $RC
-}
-
-mock_clean_metadata () {
-   echo "${FUNCNAME[0]}: in"
-   mock_clean_metadata_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-update_cgcs_repo () {
-   local REPO_NAME=$1
-   (
-    cd $MY_REPO/$REPO_NAME/
-
-    local CURR_HEAD=$(git rev-parse HEAD)
-    local LAST_HEAD_FILE="$MY_REPO/$REPO_NAME/.last_head"
-    local LAST_HEAD_FILE_OLD="$MY_WORKSPACE/$REPO_NAME.last_head"
-    local CHANGED
-    local NEW_UNTRACKED
-    local NEED_REBUILD
-    local NEED_MOCK_CLEAN=0
-    local d
-
-    if [ -f LAST_HEAD_FILE_OLD -a ! -f LAST_HEAD_FILE ]; then
-       \cp LAST_HEAD_FILE_OLD LAST_HEAD_FILE
-    fi
-
-    local LAST_HEAD=$(cat $LAST_HEAD_FILE | head -n 1)
-
-    for d in "Binary" "Source"; do
-       NEED_REBUILD=0
-       if [ ! -d $d/repodata ]; then
-          NEED_REBUILD=1
-       fi
-       if [ "$CURR_HEAD" != "$LAST_HEAD" ]; then
-          NEED_REBUILD=1
-       fi
-
-       CHANGED=$(git diff --name-only | grep $d)
-       if [ "x$CHANGED" != "x" ]; then
-          NEED_REBUILD=1
-       fi
-
-       NEW_UNTRACKED=$(git ls-files . --exclude-standard --others | grep $d)
-       if [ "x$NEW_UNTRACKED" != "x" ]; then
-          NEED_REBUILD=1
-       fi
-
-       if [ $NEED_REBUILD -eq 1 ]; then
-          NEED_MOCK_CLEAN=1
-          echo ""
-          echo "Need to recreate $REPO_NAME/$d/repodata"
-          mkdir -p $d
-
-          if [ -d $d/repodata ]; then
-             update_repodata "$d"
-          else
-             recreate_repodata "$d"
-          fi
-
-          create_lst "$d"
-       fi
-    done
-    echo "$CURR_HEAD" > $LAST_HEAD_FILE
-    \cp $LAST_HEAD_FILE $LAST_HEAD_FILE_OLD
-    if [ $NEED_MOCK_CLEAN -eq 1 ]; then
-      echo ""
-      echo "Need to clean mock"
-      mock_clean
-      set_mock_symlinks $MY_BUILD_CFG
-    fi
-   )
-}
-
-mock_clean_mounts_dir () {
-   local MOUNT=$1
-   local RC
-
-   if [ "$MOUNT" == "" ]; then
-      return 1
-   fi
-   mount | grep "$MOUNT" >> /dev/null
-   if [ $? -eq 0 ]; then
-      RC=1
-      which mock_cache_umount >> /dev/null
-      if [ $? -eq 0 ]; then
-         echo "umount '$MOUNT'"
-         mock_cache_umount "$MOUNT"
-         if [ $? -eq 0 ]; then
-            RC=0
-         fi
-      fi
-      if [ $RC -eq 1 ]; then
-         echo "ERROR: Directory '$MOUNT' is already mounted and will cause a build failure within mock."
-         echo "Ask your system administrator to umount '$MOUNT'."
-         exit 1
-      fi
-   fi
-   return 0
-}
-
-mock_clean_mounts_cfg () {
-   local CFG=$1
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-   local YUM_CACHE_MOUNT=$(readlink -f "$ROOT_DIR/root/var/cache/yum")
-   local PROC_MOUNT=$(readlink -f "$ROOT_DIR/root/proc")
-   local SYS_MOUNT=$(readlink -f "$ROOT_DIR/root/sys")
-   local SHM_MOUNT=$(readlink -f "$ROOT_DIR/root/dev/shm")
-   local PTS_MOUNT=$(readlink -f "$ROOT_DIR/root/dev/pts")
-   local MOUNT
-
-   echo "${FUNCNAME[0]}: $CFG"
-   for MOUNT in "$YUM_CACHE_MOUNT" "$PROC_MOUNT" "$SYS_MOUNT" "$SHM_MOUNT" "$PTS_MOUNT"; do
-      mock_clean_mounts_dir "$MOUNT"
-   done
-}
-
-mock_clean_mounts () {
-   echo "${FUNCNAME[0]}: in"
-   mock_clean_mounts_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-clean_yum_cache_cfg () {
-   local CFG=$1
-   local CACHE_DIR=$(mock_get_cache_dir $CFG)
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-   local RC=0
-
-   echo "${FUNCNAME[0]}: $CFG"
-
-   local YUM_CACHE_MOUNT=$(readlink -f "$ROOT_DIR/root/var/cache/yum")
-   local YUM_CACHE_LOCK="$CACHE_DIR/mock/yum_cache/yumcache.lock"
-   # echo "clean_yum_cache YUM_CACHE_MOUNT='$YUM_CACHE_MOUNT' YUM_CACHE_LOCK='$YUM_CACHE_LOCK'"
-
-   if [ "$YUM_CACHE_MOUNT" != "" ]; then
-      mock_clean_mounts_dir "$YUM_CACHE_MOUNT"
-   fi
-
-   if [ -f "$YUM_CACHE_LOCK" ]; then
-      RC=1
-      which mock_cache_unlock >> /dev/null
-      if [ $? -eq 0 ]; then
-         mock_cache_unlock "$YUM_CACHE_LOCK"
-         if [ $? -eq 0 ]; then
-            RC=0
-         fi
-      fi
-      if [ $RC -eq 1 ]; then
-         echo "ERROR: File '$YUM_CACHE_LOCK' exists and will cause a build failure within mock."
-         echo "Ask your system administrator to delete '$YUM_CACHE_LOCK'."
-         exit 1
-      fi
-   fi
-   return $RC
-}
-
-
-clean_yum_cache () {
-   echo "${FUNCNAME[0]}: in"
-   clean_yum_cache_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-mock_update_cfg () {
-   local CFG=$1
-   echo "${FUNCNAME[0]}: $CFG"
-   echo "=================================="
-   set_mock_symlinks $CFG
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --update"
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --update
-   echo "=================================="
-}
-
-mock_init_cfg () {
-   local CFG=$1
-   echo "${FUNCNAME[0]}: $CFG"
-   echo "=================================="
-   set_mock_symlinks $CFG
-   echo "$MOCK --root $CFG --configdir $(dirname $CFG) --init"
-   trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --init
-   echo "=================================="
-}
-
-mock_update_or_init_cfg () {
-   local CFG=$1
-   local TMP
-   local RC
-   echo "${FUNCNAME[0]}: $CFG"
-   local ROOT_DIR=$(mock_get_root_dir $CFG)
-
-   TMP=$(mktemp /tmp/mock_update_or_init_cfg_XXXXXX)
-   if [ $? -ne 0 ]; then
-      echo "${FUNCNAME[0]}: mktemp failed"
-      return 1
-   fi
-   if [ -d $ROOT_DIR/root ]; then
-      echo "Updating the mock environment"
-      set_mock_symlinks $CFG
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --update"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --update  &> $TMP
-      RC=$?
-   else
-      echo "Init the mock environment"
-      set_mock_symlinks $CFG
-      echo "$MOCK --root $CFG --configdir $(dirname $CFG) --init"
-      trapwrap_n $CFG $MOCK --root $CFG --configdir $(dirname $CFG) --init &> $TMP
-      RC=$?
-   fi
-   if [ $RC -ne 0 ]; then
-      cat $TMP
-   fi
-   \rm -f $TMP
-   return $RC
-}
-
-mock_update_or_init () {
-   echo "${FUNCNAME[0]}: in"
-   mock_update_or_init_cfg $BUILD_CFG
-   echo "${FUNCNAME[0]}: out"
-}
-
-if [ "x$PROJECT" == "x" ]; then
-    echo "PROJECT environmnet variable is not defined."
-    exit 1
-fi
-
-if [ "x$SRC_BUILD_ENVIRONMENT" == "x" ]; then
-    echo "SRC_BUILD_ENVIRONMENT environmnet variable is not defined."
-    exit 1
-fi
-
-NO_DESCENDANTS=0
-NO_REQUIRED=0
-NO_AUTOCLEAN=0
-NO_BUILD_INFO=0
-HELP=0
-CLEAN_FLAG=0
-FORMAL_FLAG=0
-CAREFUL=0
-DEP_TEST_FLAG=0
-
-# read the options
-TEMP=$(getopt -o h --long serial,std,rt,installer,containers,no-required,no-descendants,no-autoclean,no-build-info,dep-test,clean,formal,careful,help,layer: -n "$ME" -- "$@")
-
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-export BUILD_TYPE=std
-trap my_exit EXIT
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        --careful) CAREFUL=1 ; shift ;;
-        --no-descendants) NO_DESCENDANTS=1 ; shift ;;
-        --no-required) NO_REQUIRED=1 ; shift ;;
-        --no-autoclean) NO_AUTOCLEAN=1; shift ;;
-        --no-build-info) NO_BUILD_INFO=1; shift ;;
-        --formal) FORMAL_FLAG=1; shift ;;
-        --std) BUILD_TYPE=std; shift ;;
-        --rt) BUILD_TYPE=rt; shift ;;
-        --installer) BUILD_TYPE=installer; shift ;;
-        --containers) BUILD_TYPE=containers; shift ;;
-        -h|--help) HELP=1 ; shift ;;
-        --clean) CLEAN_FLAG=1 ; shift ;;
-        --dep-test) DEP_TEST_FLAG=1 ; MAX_WORKERS=1; NO_DESCENDANTS=1; NO_REQUIRED=1; NO_BUILD_INFO=1; shift ;;
-        --serial) shift ;;
-        --layer) export LAYER=$2 ; shift ; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error!" ; exit 1 ;;
-    esac
-done
-
-# Reset variables
-if [ -n "$MY_WORKSPACE" ]; then
-   export MY_WORKSPACE_TOP=${MY_WORKSPACE_TOP:-$MY_WORKSPACE}
-   export MY_WORKSPACE=$MY_WORKSPACE_TOP/$BUILD_TYPE
-else
-   export MY_PATCH_WORKSPACE_TOP=${MY_PATCH_WORKSPACE_TOP:-$MY_PATCH_WORKSPACE}
-   export MY_PATCH_WORKSPACE=$MY_PATCH_WORKSPACE_TOP/$BUILD_TYPE
-fi
-
-export MY_BUILD_DIR_TOP=${MY_BUILD_DIR_TOP:-$MY_BUILD_DIR}
-export MY_BUILD_DIR=$MY_BUILD_DIR_TOP/$BUILD_TYPE
-
-export MY_BUILD_ENVIRONMENT_TOP=${MY_BUILD_ENVIRONMENT_TOP:-$MY_BUILD_ENVIRONMENT}
-export MY_BUILD_ENVIRONMENT=$MY_BUILD_ENVIRONMENT_TOP-$BUILD_TYPE
-
-export MY_SRC_RPM_BUILD_DIR=$MY_BUILD_DIR/rpmbuild
-export MY_BUILD_ENVIRONMENT_FILE=$MY_BUILD_ENVIRONMENT.cfg
-export MY_BUILD_CFG=$MY_WORKSPACE/$MY_BUILD_ENVIRONMENT_FILE
-export MY_MOCK_ROOT=$MY_WORKSPACE/mock/root
-
-IMAGE_INC_FILE="${MY_WORKSPACE}/image.inc"
-image_inc_list iso std ${DISTRO} > "${IMAGE_INC_FILE}"
-
-DEV_IMAGE_INC_FILE="${MY_WORKSPACE}/image-dev.inc"
-image_inc_list iso dev ${DISTRO} > "${DEV_IMAGE_INC_FILE}"
-
-for STREAM in stable dev; do
-    WHEELS_INC_FILE="${MY_WORKSPACE}/${DISTRO}_${STREAM}_wheels.inc"
-    wheels_inc_list ${STREAM} ${DISTRO} > "${WHEELS_INC_FILE}"
-done
-
-LAST_PLATFORM_RELEASE_FILE="$MY_BUILD_DIR/.platform_release"
-
-TARGETS=$@
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ $FORMAL_FLAG -eq 1 ]; then
-   export FORMAL_BUILD=1
-fi
-
-SRC_ROOT="$MY_REPO"
-if [ "x$MY_REPO" == "x" ]; then
-   SRC_ROOT=$HOME
-fi
-
-BUILD_ROOT="$MY_WORKSPACE"
-if [ "x$MY_WORKSPACE" == "x" ]; then
-   BUILD_ROOT="$MY_PATCH_WORKSPACE"
-
-   if [ "x$MY_PATCH_WORKSPACE" == "x" ]; then
-       echo "ERROR: reqiure one of MY_WORKSPACE or MY_PATCH_WORKSPACE be defined"
-       exit 1
-   fi
-fi
-
-export BUILD_BASE="$BUILD_ROOT"
-export CCACHE_DIR="$BUILD_ROOT/.ccache"
-export RESULT_DIR="$BUILD_BASE/results"
-export SRC_BASE="$SRC_ROOT"
-export STX_BASE=$SRC_BASE/stx
-
-if [ "x$MY_SRC_RPM_BUILD_DIR" != "x" ]; then
-    RPM_BUILD_ROOT=$MY_SRC_RPM_BUILD_DIR
-else
-    RPM_BUILD_ROOT=$BUILD_BASE/rpmbuild
-fi
-
-RELEASE_INFO_FILE="$(get_release_info)"
-
-if [ -f "$RELEASE_INFO_FILE" ]; then
-   source "$RELEASE_INFO_FILE"
-else
-   echo "Warning: failed to find RELEASE_INFO_FILE=$RELEASE_INFO_FILE"
-fi
-
-if [ "x$PLATFORM_RELEASE" == "x" ]; then
-   echo "Warning: PLATFORM_RELEASE is not defined in $RELEASE_INFO_FILE"
-   PLATFORM_RELEASE="00.00"
-fi
-
-export RPM_BUILD_BASE="$RPM_BUILD_ROOT"
-export SRPM_OUT="$RPM_BUILD_BASE/SRPMS"
-export RPM_DIR="$RPM_BUILD_BASE/RPMS"
-export SPECS_DIR="$RPM_BUILD_BASE/SPECS"
-export SOURCES_DIR="$RPM_BUILD_BASE/SOURCES"
-export PLATFORM_RELEASE
-
-if [ ! -d $BUILD_BASE ]; then
-   if [ $CLEAN_FLAG -eq 1 ]; then
-       exit 0
-   fi
-   echo "ERROR: expected to find directory at '$BUILD_BASE'"
-   exit 1
-fi
-
-
-mkdir -p $RPM_BUILD_BASE
-if [ $? -ne 0 ]; then
-   echo "ERROR: Failed to create directory '$RPM_BUILD_BASE'"
-   exit 1
-fi
-
-mkdir -p $SRPM_OUT/repodata
-if [ $? -ne 0 ]; then
-   echo "ERROR: Failed to create directory '$SRPM_OUT/repodata'"
-   exit 1
-fi
-
-mkdir -p $RPM_DIR/repodata
-if [ $? -ne 0 ]; then
-   echo "ERROR: Failed to create directory '$RPM_DIR/repodata'"
-   exit 1
-fi
-
-if [ "x$MY_BUILD_CFG" == "x" ];then
-   echo "ERROR: reqiure MY_BUILD_CFG to be defined"
-   exit 1
-fi
-
-export BUILD_CFG="$MY_BUILD_CFG"
-
-# Place build-time environement variables in mock environment
-echo "FORMAL_BUILD=$FORMAL_BUILD"
-echo "modify-build-cfg $BUILD_CFG"
-${BUILD_RPMS_SERIAL_DIR}/modify-build-cfg $BUILD_CFG
-if [ $? -ne 0 ]; then
-       echo "Could not modifiy $BUILD_CFG";
-       exit 1
-fi
-
-if [ ! -f $BUILD_CFG ]; then
-   echo "ERROR: Mock config file not found at '$BUILD_CFG'"
-   exit 1
-fi
-
-# create temp dir
-export TMPDIR="$MY_WORKSPACE/tmp"
-mkdir -p "$TMPDIR"
-
-# Create symlinks from /var/... to /localdisk/loadbuild/... if on a build server
-
-set_mock_symlinks $MY_BUILD_CFG
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-    ls $SRPM_OUT/*.src.rpm &>> /dev/null
-    if [ $? -ne 0 ]; then
-        echo "Nothing to build in '$SRPM_OUT'"
-        exit 0
-    fi
-fi
-
-ALL=0
-UNRESOLVED_TARGETS=" "
-
-if [ $DEP_TEST_FLAG -eq 1 ]; then
-    # we expect exactly one package
-    if [ $(echo $TARGETS | wc -w) -ne 1 ]; then
-        echo "ERROR: dependency testing requires exactly one package"
-        usage
-        exit 1
-    fi
-else
-    # we accept a list of packages, and no list implies all
-    if [ "x$TARGETS" == "x" ]; then
-        echo "make: all"
-        ALL=1
-    else
-        echo "make: $TARGETS"
-        UNRESOLVED_TARGETS="$TARGETS"
-    fi
-fi
-
-if [ "$BUILD_TYPE" != "std" ]; then
-    # This defines ...
-    #    STD_SRPM_PKG_NAME_TO_PATH
-    #    STD_SRPM_PKG_NAMES
-    srpm_build_std_dictionary $MY_WORKSPACE_TOP/std/rpmbuild/SRPMS
-fi
-
-# This defines ...
-#    SRPM_PKG_NAME_TO_PATH
-#    SRPM_PKG_NAMES
-srpm_build_dictionary $SRPM_OUT
-
-SRPMS_TO_COMPILE=()
-SRPMS_LIST=""
-RPMS_LIST=""
-
-clean_list () {
-   local SRPMS_LIST="$1"
-   local RPMS_LIST="$2"
-   local ALL=$3
-   local TARGET
-   local b
-   local d
-   local f
-   local n
-   local p
-   local r
-   local s
-   local sn
-   local t
-   local SPEC_DIR
-
-   echo "${FUNCNAME[0]}: '$SRPMS_LIST'  '$RPMS_LIST'  '$ALL'"
-   if [ $ALL -eq 1 ]; then
-       for r in $(find $RPM_DIR -name "*.rpm"); do
-           \rm -f -v $r
-       done
-
-       if [ $CLEAN_FLAG -eq 1 ]; then
-          for d in $(find $SPECS_DIR -type d); do
-             echo "rm -rf $d"
-              \rm -rf "$d" 2>> /dev/null
-          done
-       fi
-
-       for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -type d 2>> /dev/null); do
-           echo "rm -rf $d"
-           \rm -rf "$d" 2>> /dev/null
-       done
-   else
-       for s in $SRPMS_LIST; do
-           SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-           sn=$(rpm_get_name $s)
-           update_spec_cache $s
-
-           TARGET=$(rpm -qp --qf '%{NAME}-%{VERSION}\n' "$s")
-           for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -name "$TARGET*" 2>> /dev/null); do
-               echo "rm -rf $d"
-               \rm -rf "$d" 2>> /dev/null
-           done
-
-           for p in $(ls -1 $SPEC_DIR/BUILDS); do
-               for r in $(find $RESULT_DIR/$USER-* $RPM_DIR -name "$p-*.rpm" 2>> /dev/null); do
-                   if [ -f $r ]; then
-                       n=$(rpm_get_name $r)
-                       if [ "$n" == "$p" ]; then
-                          if [[ "$r" == *.src.rpm ]]; then
-                              if [ "$n" != "$sn" ]; then
-                                 continue
-                              fi
-
-                              TARGET=$(rpm -qp --qf '%{NAME}-%{VERSION}\n' "$r")
-                              for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -name "$TARGET*" 2>> /dev/null); do
-                                  echo "rm -rf $d"
-                                  \rm -rf "$d" 2>> /dev/null
-                              done
-
-                          else
-                              rs=$(rpm_get_srpm $r)
-                              if [[ "$rs" != "$sn"-[0-9]* ]]; then
-                                  continue
-                              fi
-                          fi
-
-                          \rm -f -v $r
-                       fi
-                   fi
-               done
-           done
-
-           TARGET=$(rpm -qp --qf '%{NAME}-%{VERSION}\n' "$s")
-
-           if [ $CLEAN_FLAG -eq 1 ]; then
-               for d in $(find $SPECS_DIR -type d -name "$TARGET*" 2>> /dev/null); do
-                   echo "rm -rf $d"
-                    \rm -rf "$d" 2>> /dev/null
-               done
-           fi
-
-           for d in $(find $RESULT_DIR/$USER-* -maxdepth 1 -name "$TARGET*" 2>> /dev/null); do
-               echo "rm -rf $d"
-               \rm -rf "$d" 2>> /dev/null
-           done
-       done
-   fi
-
-   echo ""
-   echo "Cleaning repodata"
-   for d in $(find -L  $MY_WORKSPACE/rpmbuild $MY_WORKSPACE/results   -type d -name repodata); do
-      recreate_repodata $(dirname $d)
-      create_lst $(dirname $d)
-   done
-
-   echo ""
-   echo "Cleaning mock environment"
-   echo ""
-
-   if [ $ALL -eq 1 ]; then
-       # Wipe everything
-       if [ "x$RPM_DIR" != "x" ]; then
-           \rm -rf -v $RPM_DIR/* 2>> /dev/null
-       fi
-
-       \rm -f -v $RESULT_DIR/mockchain.log 2>> /dev/null
-       mock_clean
-   else
-       # If dependency test
-       if [ $DEP_TEST_FLAG -eq 1 ]; then
-           mock_clean
-       else
-           # Wipe only traces of what we built
-           mock_partial_clean "$SRPMS_LIST" "$RPMS_LIST"
-       fi
-   fi
-}
-
-echo "ALL=$ALL"
-(
-trap my_exit EXIT
-trap my_sigint INT
-trap my_sighup HUP
-echo "$CMDLINE"
-echo "ALL=$ALL"
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-  if [ -d $RESULT_DIR ]; then
-    # in case previous build recieved a ctrl-C and didn't get a change to copy it's successful work into RPM_DIR
-    for d in $(find $RESULT_DIR -name '*.rpm' | grep -v '[.]src[.]rpm' | xargs --no-run-if-empty --max-args=1 dirname | sort -u); do
-        rsync -u $d/*.rpm $RPM_DIR
-    done
-    for d in $(find -L $RESULT_DIR  -type d -name repodata); do
-       update_repodata $(dirname $d)
-    done
-  fi
-fi
-
-spec_cache_dir_from_srpm () {
-   local SRPM=${1}
-   local SPEC_DIR=$(echo $SRPM | sed 's#/SRPMS/#/SPECS/#')
-   echo "$SPEC_DIR"
-}
-
-update_spec_cache () {
-   local SRPM=${1}
-   local SPEC_DIR=$(spec_cache_dir_from_srpm $SRPM)
-   local NEED_UPDATE=0
-
-   if [ ! -d $SPEC_DIR ]; then
-      mkdir -p  $SPEC_DIR
-      NEED_UPDATE=1
-   else
-      find "$SPEC_DIR" -name '*.spec' | grep 'spec' >> /dev/null
-      if [ $? -ne 0 ]; then
-         # No spec file
-         NEED_UPDATE=1
-      fi
-
-      find "$SPEC_DIR" -not -newermm "$SRPM" -name '*.spec' | grep -q 'spec'
-      if [ $? -eq 0 ]; then
-         # spec is older than src.rpm
-         NEED_UPDATE=1
-      fi
-   fi
-
-   if [ $NEED_UPDATE -ne 0 ]; then
-      (
-      cd $SPEC_DIR
-      \rm -rf BUILDS BUILDS_VR *.spec 2>> /dev/null
-      mkdir -p BUILDS
-      mkdir -p NAMES
-      mkdir -p SERVICES
-      mkdir -p BUILDS_VR
-      rpm2cpio $SRPM | cpio -civ '*.spec'
-      if [ $? -ne 0 ]; then
-         echo "ERROR: no spec file found in '$SRPM'"
-      fi
-      for f in $(find . -name '*.spec' | sort -V); do
-         touch $f
-         for p in $(spec_list_ver_rel_packages $f); do
-            touch "BUILDS_VR/$p"
-         done
-         for p in $(spec_list_packages $f); do
-            touch "BUILDS/$p"
-         done
-         for p in $(spec_find_tag Name $f 2>> /dev/null); do
-            touch "NAMES/$p"
-         done
-         for p in $(spec_find_global service $f 2>> /dev/null); do
-            touch "SERVICES/$p"
-         done
-      done
-      )
-   fi
-}
-
-# Find the list of packages we must compile
-
-echo "Find the list of packages we must compile"
-
-mkdir -p $MY_WORKSPACE/tmp/
-NEED_BUILD_DIR=$(mktemp -d $MY_WORKSPACE/tmp/$USER-$ME-need-build-XXXXXX)
-if [ $? -ne 0 ] || [ "x$NEED_BUILD_DIR" == "x" ]; then
-    echo "Failed to create temp directory under $MY_WORKSPACE/tmp"
-    exit 1
-fi
-
-UNRESOLVED_TARGETS_DIR=$(mktemp -d $MY_WORKSPACE/tmp/$USER-$ME-unresolved-XXXXXX)
-if [ $? -ne 0 ] || [ "x$UNRESOLVED_TARGETS_DIR" == "x" ]; then
-    echo "Failed to create temp directory under $MY_WORKSPACE/tmp"
-    exit 1
-fi
-
-for n in ${UNRESOLVED_TARGETS}; do
-    touch $UNRESOLVED_TARGETS_DIR/$n
-done
-
-PLATFORM_RELEASE_CHANGED=0
-if [ -f $LAST_PLATFORM_RELEASE_FILE ]; then
-    LAST_PLATFORM_RELEASE=$(cat $LAST_PLATFORM_RELEASE_FILE)
-    if [ "$LAST_PLATFORM_RELEASE" != "$PLATFORM_RELEASE" ]; then
-        PLATFORM_RELEASE_CHANGED=1
-    fi
-else
-    PLATFORM_RELEASE_CHANGED=1
-fi
-
-for n in "${SRPM_PKG_NAMES[@]}"; do
-
-    s=${SRPM_PKG_NAME_TO_PATH[$n]}
-    SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-    update_spec_cache $s
-    # echo "$BASHPID: considering $n: $s, SPEC_DIR=$SPEC_DIR"
-    NEED_BUILD=0
-
-    if [ "x$TARGETS" == "x" ]; then
-        # We weren't given a list of build targets.
-        # Build anything missing or out of date.
-        NEED_BUILD=0
-        BN=$(basename ${s//.src.rpm/})
-
-        if [ -f $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/fail ]; then
-            echo "Found: $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/fail"
-            echo "Previous build of $BN failed"
-            NEED_BUILD=1
-        elif [ ! -f $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/success ]; then
-            echo "Not Found: $RESULT_DIR/$MY_BUILD_ENVIRONMENT/$BN/success"
-            echo "No previous build of $BN"
-            NEED_BUILD=1
-        else
-            LOCAL_RPMS_VRA_LIST=$(ls -1 $SPEC_DIR/BUILDS_VR | tr '\n' ' ')
-
-            for f in $LOCAL_RPMS_VRA_LIST; do
-                m=$(find $RPM_DIR/$f*rpm 2>> /dev/null | wc -l)
-                if [ $m -eq 0 ] && [ -f "$UNBUILT_PATTERN_FILE" ]; then
-                    echo $f | grep -f "$UNBUILT_PATTERN_FILE" >> /dev/null && m=1
-                    if [ $m -eq 1 ]; then
-                       echo "Excluding '$f' due to match in UNBUILT_PATTERN_FILE '$UNBUILT_PATTERN_FILE'"
-                       if [ -f "$IMAGE_INC_FILE" ] ; then
-                          for t in $(grep -v '^#' "$IMAGE_INC_FILE"); do
-                             ii=$(echo $f | grep "^$t-[0-9]" | wc -l)
-                             if [ $ii -gt 0 ]; then
-                                echo "Including '$f' due to match in IMAGE_INC_FILE '$IMAGE_INC_FILE' due to pattern '^$t-[0-9]'"
-                                m=0
-                                break
-                             fi
-                          done
-                       fi
-                    fi
-                fi
-
-                newer=$(find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l)
-                # echo "$m  $newer=find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l"
-                if [ $m -eq 0 ] || [ $newer -gt 0 ] || [ $CLEAN_FLAG -eq 1 ]; then
-                    if [ $newer -gt 0 ]; then
-                        echo "Including '$f' due to newer code"
-                        find $RPM_DIR/$f*rpm -type f -not -newermm $s
-                    else
-                        if [ $m -eq 0 ]; then
-                            echo "Including '$f' due to m=0"
-                        else
-                           if [ $CLEAN_FLAG -eq 1 ]; then
-                               echo "Including '$f' due to CLEAN_FLAG=1"
-                           fi
-                        fi
-                    fi
-                    NEED_BUILD=1
-                    break
-                fi
-            done
-        fi
-    else
-        # We were given a list of build targets,
-        # try to find packages matching that list.
-        NEED_BUILD=0
-        for f in $(find $SPEC_DIR/NAMES $SPEC_DIR/SERVICES $SPEC_DIR/BUILDS -type f 2>> /dev/null); do
-            b=$(basename $f)
-            for t in $TARGETS; do
-                if [[ ( "$b" == "$t" ) || ( ( "$BUILD_TYPE" == "rt" ) && ( "$b" == "$t-rt" ) ) ]]; then
-                    echo "Including named target '$f'"
-                    TARGET_FOUND=$t
-                    NEED_BUILD=1
-                    # UNRESOLVED_TARGETS=$(echo "$UNRESOLVED_TARGETS" | sed "s/\(^\|[[:space:]]\)$TARGET_FOUND\([[:space:]]\|$\)/ /g")
-                    if [ -f $UNRESOLVED_TARGETS_DIR/$TARGET_FOUND ]; then
-                        \rm -f $UNRESOLVED_TARGETS_DIR/$TARGET_FOUND
-                    fi
-                    break
-                fi
-            done
-        done
-    fi
-
-    if [ $NO_BUILD_INFO -eq 0 ]; then
-        if [ "$n" == "build-info" ]; then
-            echo "Including '$n' by default"
-            NEED_BUILD=1
-        fi
-    fi
-
-    if [ $PLATFORM_RELEASE_CHANGED -eq 1 ]; then
-        grep '%{platform_release}' $SPEC_DIR/*.spec >> /dev/null
-        if [ $? -eq 0 ]; then
-            echo "Including '$n' due to changed platform_release"
-            NEED_BUILD=1
-        fi
-    fi
-
-    if [ $NEED_BUILD -eq 1 ]; then
-        echo "found $n: $r"
-        SRPMS_TO_COMPILE+=("$n")
-    fi
-
-    \rm -rf $TMPDIR
-done
-
-UNRESOLVED_TARGETS=" "
-for n in $(ls -1 $UNRESOLVED_TARGETS_DIR); do
-    UNRESOLVED_TARGETS="$UNRESOLVED_TARGETS $n"
-done
-\rm -rf $NEED_BUILD_DIR
-\rm -rf $UNRESOLVED_TARGETS_DIR
-
-ORIG_SRPMS_TO_COMPILE=( ${SRPMS_TO_COMPILE[@]} )
-
-echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-
-
-# adding dependant packages
-if [ $CLEAN_FLAG -eq 0 ] && [ $NO_DESCENDANTS -eq 0 ] && [ -f $SRPM_DIRECT_DESCENDANTS_FILE ]; then
-   echo
-   echo "adding dependant packages"
-
-   # This array will accumulate a list of secondary build targets.
-   TRANSITIVE_SRPMS_TO_COMPILE=()
-
-   # Add packages that directly depend on the primary build targets in ORIG_SRPMS_TO_COMPILE
-   for n in ${ORIG_SRPMS_TO_COMPILE[@]}; do
-       needs=( $(grep "^$n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$n;//" | sed 's/,/ /g'; alt_n=$(echo "$n" | sed 's#-rt$##'); if [ "$alt_n" != "$n" ]; then grep "^$alt_n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$alt_n;//" | sed 's/,/ /g' | sed 's#\([^[:space:]]*\)#\1-rt#g'; fi ) )
-
-       # intersection of 'needs' and 'SRPM_PKG_NAMES' ... i.e. what should be compiled that we have source for
-       compilable_needs=( $(intersection needs SRPM_PKG_NAMES) )
-       TRANSITIVE_SRPMS_TO_COMPILE=( $(union compilable_needs TRANSITIVE_SRPMS_TO_COMPILE) )
-   done
-
-   # For non-std build, and if non specific build targets are named, then search all
-   # packages that we might build and check if they require a package that DID build
-   # in the std build.  If so build the package as a secondary target, even though the
-   # primary target was from a different build_type.
-   if [ "$BUILD_TYPE" != "std" ] && [ $ALL -eq 1 ] && [ -f $SRPM_TO_RPM_MAP_FILE ] && [ -f $SRPM_RPM_DIRECT_REQUIRES_FILE ]; then
-       # Test all that we can build ...
-       for n in ${SRPM_PKG_NAMES[@]}; do
-           contains ORIG_SRPMS_TO_COMPILE $n
-           if [ $? -eq 0 ]; then
-               # Already on the primary build list, skip it.
-               echo "skip $n"
-               continue
-           fi
-
-           STD_NEEDS_BUILD=0
-
-           # Iterate over all binary rpms names produce by the candidate package
-           for b in $(grep "^$n;" "$SRPM_TO_RPM_MAP_FILE" | sed "s/$n;//" | sed 's/,/ /g'); do
-               # find an rpm file with the rpm name we seek
-               for bp in $(find $RPM_DIR -name "$b-[0-9]*.rpm" | grep -v '.src.rpm'); do
-                   if [ "$b" != "$(rpm_get_name $bp)" ]; then
-                       # rpm name doesn't match
-                       continue
-                   fi
-
-                   # Iterate over binary rpms names required by the candidate package
-                   for r in $(grep "^$n;" "$SRPM_RPM_DIRECT_REQUIRES_FILE" | sed "s/$n;//" | sed 's/,/ /g'); do
-                       if [ $r == $n ]; then
-                           # Ignore self dependency
-                           continue
-                       fi
-
-                       # find a required rpm file with the rpm name we seek, AND is newer than the produced rpm file
-                       for rp in $(find $(echo $RPM_DIR | sed "s#/$BUILD_TYPE/#/std/#") -name "$r-[0-9]*.rpm" -newermm $bp | grep -v '.src.rpm'); do
-                           if [ "$r" != "$(rpm_get_name $rp)" ]; then
-                               # rpm name doesn't match
-                               continue
-                           fi
-
-                           # Ok, a required rpm is newer than a built rpm, we should rebuild!
-                           echo "rebuild '$n' due to newer '$r'"
-                           STD_NEEDS_BUILD=1
-                           break
-                       done
-                   done
-               done
-
-               # Avoid pointless processing if we already have a positive result.
-               if [ $STD_NEEDS_BUILD -eq 1 ]; then
-                   break
-               fi
-           done
-
-           if [ $STD_NEEDS_BUILD -eq 1 ]; then
-               # Compile is requires due to an updated required package in the std build.
-               # Add 'n' to array TRANSITIVE_SRPMS_TO_COMPILE.
-               TRANSITIVE_SRPMS_TO_COMPILE=( $(put TRANSITIVE_SRPMS_TO_COMPILE $n) )
-           fi
-       done
-   fi
-
-   # If the kernel or kernel-rt packages where absent from the primary build targets, but
-   # added as a secondary target, then make sure all out-of-tree kernel modules are also
-   # added.
-   for n in kernel kernel-rt; do
-       KERNEL_IN_ORIG=0
-       KERNEL_IN_TRANSITIVE=0
-       contains ORIG_SRPMS_TO_COMPILE "$n" && KERNEL_IN_ORIG=1
-       contains TRANSITIVE_SRPMS_TO_COMPILE "$n" && KERNEL_IN_TRANSITIVE=1
-       if [ $KERNEL_IN_TRANSITIVE -eq 1 ] && [ $KERNEL_IN_ORIG -eq 0 ]; then
-           needs=( $(grep "^$n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$n;//" | sed 's/,/ /g'; alt_n=$(echo "$n" | sed 's#-rt$##'); if [ "$alt_n" != "$n" ]; then grep "^$alt_n;" "$SRPM_DIRECT_DESCENDANTS_FILE" | sed "s/$alt_n;//" | sed 's/,/ /g' | sed 's#\([^[:space:]]*\)#\1-rt#g'; fi ) )
-
-           # intersection of 'needs' and 'SRPM_PKG_NAMES' ... i.e. what should be compiled that we have source for
-           compilable_needs=( $(intersection needs SRPM_PKG_NAMES) )
-           TRANSITIVE_SRPMS_TO_COMPILE=( $(union compilable_needs TRANSITIVE_SRPMS_TO_COMPILE) )
-       fi
-   done
-
-   # Append the secondary targetc list to the primary list
-   SRPMS_TO_COMPILE=( $(union SRPMS_TO_COMPILE TRANSITIVE_SRPMS_TO_COMPILE) )
-   echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-fi
-
-
-MUST_SRPMS_TO_COMPILE=( ${SRPMS_TO_COMPILE[@]} )
-
-# adding required packages
-if [ $CLEAN_FLAG -eq 0 ] && [ "x$TARGETS" != "x" ] && [ $NO_REQUIRED -eq 0 ] && [ -f $SRPM_TRANSITIVE_REQUIRES_FILE ]; then
-   echo
-   echo "adding required packages"
-   TRANSITIVE_SRPMS_TO_COMPILE=()
-   for n in ${MUST_SRPMS_TO_COMPILE[@]}; do
-       needs=( $(grep "^$n;" "$SRPM_TRANSITIVE_REQUIRES_FILE" | sed "s/$n;//" | sed 's/,/ /g') )
-
-       # intersection of 'needs' and 'SRPM_PKG_NAMES' ... i.e. what should be compiled that we have source for
-       compilable_needs=( $(intersection needs SRPM_PKG_NAMES) )
-       TRANSITIVE_SRPMS_TO_COMPILE=( $(union compilable_needs TRANSITIVE_SRPMS_TO_COMPILE) )
-
-       for b in "${un[@]}"; do
-          echo $b
-       done
-   done
-
-   SRPMS_TO_COMPILE=( $(union TRANSITIVE_SRPMS_TO_COMPILE SRPMS_TO_COMPILE) )
-   echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-fi
-
-
-# Determine build order
-SRPMS_TO_COMPILE=( $(echo ${SRPMS_TO_COMPILE[@]} | sed 's/ /\n/g' | sort -u) )
-if [ $CLEAN_FLAG -eq 0 ]; then
-   echo
-   echo "Calculate optimal build order"
-   SRPMS_TO_COMPILE=( $(build_order SRPMS_TO_COMPILE) )
-   echo "SRPMS_TO_COMPILE = ${SRPMS_TO_COMPILE[@]}"
-fi
-
-
-# convert pkg names to paths, clean work dirs if needed
-echo
-echo "Mapping packages to src rpm paths"
-for n in ${SRPMS_TO_COMPILE[@]}; do
-    s=${SRPM_PKG_NAME_TO_PATH[$n]}
-    SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-    update_spec_cache $s
-
-    SRPMS_LIST="$SRPMS_LIST $s"
-    # echo "SRPMS_LIST = $SRPMS_LIST"
-
-    TMP_RPMS_LIST=$(ls -1 $SPEC_DIR/BUILDS | tr '\n' ' ')
-    RPMS_LIST="$RPMS_LIST $TMP_RPMS_LIST"
-done
-echo
-
-CENTOS_REPO=centos-repo
-if [ ! -d ${MY_REPO}/${CENTOS_REPO} ]; then
-    CENTOS_REPO=cgcs-centos-repo
-    if [ ! -d ${MY_REPO}/${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-if [ $CLEAN_FLAG -eq 0 ]; then
-   update_cgcs_repo ${CENTOS_REPO}
-fi
-
-mock_clean_mounts
-
-# clean work dirs if needed
-CLEAN_BEFORE_BUILD_SRPM_LIST=""
-CLEAN_BEFORE_BUILD_RPM_LIST=""
-if [ $CLEAN_FLAG -eq 0 ]; then
-    echo
-    echo "Calculating minimal clean list"
-    for nm in ${SRPMS_TO_COMPILE[@]}; do
-        MUST_CLEAN=0
-        contains MUST_SRPMS_TO_COMPILE $nm && MUST_CLEAN=1
-
-        s=${SRPM_PKG_NAME_TO_PATH[$nm]}
-        SPEC_DIR=$(spec_cache_dir_from_srpm $s)
-        update_spec_cache $s
-
-        LOCAL_RPMS_LIST=$(ls -1 $SPEC_DIR/BUILDS | tr '\n' ' ')
-        LOCAL_RPMS_VRA_LIST=$(ls -1 $SPEC_DIR/BUILDS_VR | tr '\n' ' ')
-
-        for f in $LOCAL_RPMS_VRA_LIST; do
-            m=$(find $RPM_DIR/$f*rpm 2>> /dev/null | wc -l)
-            if [ -f "$UNBUILT_PATTERN_FILE" ]; then
-                echo $f | grep -f "$UNBUILT_PATTERN_FILE" >> /dev/null && m=1
-            fi
-
-            n=$(find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l)
-            # echo "$n=find $RPM_DIR/$f*rpm -type f -not -newermm $s 2>> /dev/null | wc -l"
-            if [ $m -eq 0 ] || [ $n -gt 0 ] || [ $MUST_CLEAN -eq 1 ]; then
-                CLEAN_BEFORE_BUILD_SRPM_LIST="$CLEAN_BEFORE_BUILD_SRPM_LIST $s"
-                CLEAN_BEFORE_BUILD_RPM_LIST="$CLEAN_BEFORE_BUILD_RPM_LIST $LOCAL_RPMS_LIST"
-                break
-            fi
-        done
-    done
-fi
-
-
-if [ "$UNRESOLVED_TARGETS" != " " ]; then
-    if [ $CLEAN_FLAG -eq 0 ]; then
-        echo ""
-        echo "ERROR: failed to resolve build targets: $UNRESOLVED_TARGETS"
-        exit 1
-    fi
-fi
-
-echo "SRPMS_LIST = $SRPMS_LIST"
-echo "RPMS_LIST = $RPMS_LIST"
-
-
-echo
-if [ $CLEAN_FLAG -eq 0 ]; then
-   # pre-create these directories as $USER,
-   # else mock will create them as root and fails to clean them.
-   # Note: keep these in sync with mockchain-parallel!
-   mkdir -p $MY_WORKSPACE/mock
-   mkdir -p $MY_WORKSPACE/cache/mock
-
-   mock_update_or_init
-fi
-set_mock_symlinks $MY_BUILD_CFG
-
-echo
-echo "Cleaning"
-if [ $CLEAN_FLAG -eq 1 ]; then
-   # Clean what the user asked for
-   echo "========= clean_list '$SRPMS_LIST' '$RPMS_LIST' $ALL"
-   \rm -r -f -v $MY_WORKSPACE/mock-$USER-*
-   clean_list "$SRPMS_LIST" "$RPMS_LIST" "$ALL"
-
-   exit 0
-else
-   # Clean what we intend to build
-   if [ $NO_AUTOCLEAN -eq 1 ]; then
-      echo "no-autoclean was requested"
-   else
-      if [ "$CLEAN_BEFORE_BUILD_SRPM_LIST" != "" ]; then
-         echo "========= clean_list '$CLEAN_BEFORE_BUILD_SRPM_LIST' '$CLEAN_BEFORE_BUILD_RPM_LIST' 0"
-         clean_list "$CLEAN_BEFORE_BUILD_SRPM_LIST" "$CLEAN_BEFORE_BUILD_RPM_LIST" 0
-      fi
-   fi
-fi
-
-echo
-echo "Cleaning repodata"
-
-BUILD_ENVIRONMENT_DIR=$(basename $BUILD_CFG)
-BUILD_ENVIRONMENT_DIR=${BUILD_ENVIRONMENT_DIR%.*}
-LOCAL_URL=http://127.0.0.1:8088$BUILD_BASE/results/$BUILD_ENVIRONMENT_DIR/
-LOCAL_SRC_URL=http://127.0.0.1:8088$BUILD_BASE/rpmbuild/SRPMS/
-
-for d in $(find -L $RESULT_DIR  -type d -name repodata); do
-(cd $d/..
- if [ -f repodata/*comps*xml ]; then
-    \mv repodata/*comps*xml comps.xml
- fi
- \rm -rf repodata
-)
-done
-
-echo
-echo "Cleaning Metadata"
-
-MOCKCHAIN_LOG="$RESULT_DIR/mockchain.log"
-mkdir -p $RESULT_DIR
-touch $RESULT_DIR/build_start
-\rm -rf $MOCKCHAIN_LOG
-
-mock_clean_metadata
-
-echo
-echo "Building"
-
-recreate_repodata $BUILD_BASE/results/$BUILD_ENVIRONMENT_DIR
-
-CMD_PREFIX=""
-if [ -x /bin/ionice ]; then
-    CMD_PREFIX="nice -n 20 ionice -c Idle /bin/ionice "
-fi
-
-REAL_MOCKCHAIN=0
-MOCK_PASSTHROUGH=""
-MOCKCHAIN="mock"
-CHAIN_OPTION="--chain"
-if file $(which mockchain) | grep -q 'Python script'; then
-    REAL_MOCKCHAIN=1
-    MOCK_PASSTHROUGH="-m"
-    MOCKCHAIN="mockchain"
-    CHAIN_OPTION=""
-fi
-
-CMD_OPTIONS="$MOCK_PASSTHROUGH --no-clean $MOCK_PASSTHROUGH --no-cleanup-after"
-if [ $CAREFUL -eq 1 ]; then
-   CMD_OPTIONS="$MOCK_PASSTHROUGH --no-cleanup-after"
-fi
-if [ $REAL_MOCKCHAIN -eq 1 ]; then
-    CMD_OPTIONS+=" --log=$MOCKCHAIN_LOG"
-fi
-echo "CAREFUL=$CAREFUL"
-echo "CMD_OPTIONS=$CMD_OPTIONS"
-
-CMD="$CMD_PREFIX $MOCKCHAIN --root $BUILD_CFG --localrepo $BUILD_BASE --recurse --tmp_prefix=$USER --addrepo=$LOCAL_URL --addrepo=$LOCAL_SRC_URL $MOCK_PASSTHROUGH --rootdir=$BUILD_BASE/mock/root $CMD_OPTIONS $MOCK_PASSTHROUGH --rebuild"
-CMD_BUILD_LIST="$CHAIN_OPTION $SRPMS_LIST"
-echo ""
-echo "$CMD $MOCK_PASSTHROUGH --define='_tis_dist .tis' $MOCK_PASSTHROUGH --define='platform_release $PLATFORM_RELEASE' $CMD_BUILD_LIST"
-echo ""
-trapwrap stdbuf -o0 $CMD $MOCK_PASSTHROUGH --define="_tis_dist .tis" $MOCK_PASSTHROUGH --define="platform_release $PLATFORM_RELEASE" $CMD_BUILD_LIST
-MOCKCHAIN_RC=$?
-
-echo $PLATFORM_RELEASE > $LAST_PLATFORM_RELEASE_FILE
-
-for d in $(find $RESULT_DIR -name '*.rpm' | grep -v '[.]src[.]rpm' | xargs --max-args=1 dirname | sort -u); do
-    rsync -u $d/*.rpm $RPM_DIR
-done
-
-if [ $ALL -eq 1 ]; then
-    echo
-    echo "Auditing for obsolete srpms"
-    for r in $(find $RESULT_DIR $RPM_DIR -name '*.src.rpm'); do
-        (
-        f=$(basename $r)
-        if [ ! -f "$SRPM_OUT/$f" ]; then
-            \rm -fv $r
-        fi
-        ) &
-    done
-    echo "waiting for srpm audit to complete"
-    wait
-    echo "Auditing for obsolete rpms"
-    for r in $(find $RESULT_DIR $RPM_DIR -name '*.rpm' | grep -v 'src.rpm'); do
-        (
-        s=$(rpm_get_srpm $r)
-        if [ ! -f "$SRPM_OUT/$s" ]; then
-            echo "Failed to find '$SRPM_OUT/$s'"
-            \rm -fv $r
-        fi
-        ) &
-    done
-    echo "waiting for rpm audit to complete"
-    wait
-    echo "Audit complete"
-    echo ""
-fi
-
-if [ $MOCKCHAIN_RC -ne 0 ]; then
-   echo "ERROR: Failed to build rpms using '$CMD'"
-   exit 1
-fi
-
-echo "Recreate repodata"
-for d in $(find -L $MY_WORKSPACE/rpmbuild $MY_WORKSPACE/results  -type d -name repodata); do
-   update_repodata $(dirname "$d")
-   create_lst $(dirname "$d")
-done
-
-
-if [ -f $MOCKCHAIN_LOG ]; then
-    grep 'following pkgs could not be successfully built' $MOCKCHAIN_LOG >> /dev/null
-    if [ $? -eq 0 ]; then
-        FAILED_PKGS=""
-        for p in $(sed -n '/following pkgs could not be successfully built:/,/Results out to/p' $MOCKCHAIN_LOG | grep -v '*** Build Failed ***'  | sed 1d | sed '$ d' | cut -d ':' -f2-); do
-            PKG=$(basename $p)
-            FAILED_PKGS="$PKG  $FAILED_PKGS"
-        done
-        echo
-        echo "Failed to build packages:  $FAILED_PKGS"
-        exit 1
-    fi
-fi
-
-# If we're doing a nightly or formal build (i.e. not a developer build) then we
-# want to sign certain packages.  Note that only certain users (i.e. jenkins)
-# have the authority to requiest that packages be signed.
-#
-# Signing is not actually done on this server (the keys are kept safe on a
-# different server with very limited access) but we can invoke a script to
-# make calls to the signing server.  Note that this will NOT work if you are
-# not Jenkins and don't have access to the Jenkins cross server login keys.
-#
-# Note that both std and rt builds must be complete before invoking the signing
-# script
-if [ 0$FORMAL_BUILD -eq 1 ] && [ "$USER" == "jenkins" ]; then
-	if [ -e $MY_WORKSPACE_TOP/std ] && [ -e $MY_WORKSPACE_TOP/rt ]; then
-		# Create dir for log, if it doesn't exit
-		mkdir -p $MY_WORKSPACE_TOP/export
-		echo "We are jenkins, and we are trying to do a formal build -- calling signing server"
-		echo "  to sign boot RPMs with secure boot keys"
-
-		MY_WORKSPACE=$MY_WORKSPACE_TOP ${SIGN_SECURE_BOOT} > $MY_WORKSPACE_TOP/export/${SIGN_SECURE_BOOT_LOG} 2>&1
-		if [ $? -ne 0 ]; then
-			echo "Signing of packages failed -- see $MY_WORKSPACE_TOP/export/${SIGN_SECURE_BOOT_LOG}"
-			exit 1
-		fi
-	fi
-fi
-
-exit 0
-) 2>&1 | stdbuf -o0 awk '{ print strftime("%H:%M:%S"), $0; fflush(); }' | tee $(date "+$MY_WORKSPACE/build-rpms-serial_%Y-%m-%d_%H-%M-%S.log") ; exit ${PIPESTATUS[0]}
diff --git a/build-tools/build-srpms b/build-tools/build-srpms
deleted file mode 100755
index c0f1bafc..00000000
--- a/build-tools/build-srpms
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Create src.rpm files from source, or from a downloaded tarball
-# or src.rpm plus our additional patches.
-#
-# This program is a wrapper around build-srpms-parallel and build-srpms-serial
-#
-# The location of packages to be build are identified by
-# <distro>_pkg_dirs[_<opt-build-type>] files located at the root of
-# any git tree (e.g. istx/stx-integ/centos_pkg_dirs).
-#
-# The build of an individual package is driven by its build_srpm.data
-# file plus a <pkg-name>.spec file or an srpm_path file.
-#
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create source rpms:"
-    echo "   build-srpms [--serial] [args]"
-}
-
-SERIAL_FLAG=0
-
-for arg in "$@"; do
-    case "$1" in
-        --serial) SERIAL_FLAG=1 ;;
-    esac
-done
-
-which mock_tmpfs_umount >> /dev/null
-if [ $? -ne 0 ]; then
-    SERIAL_FLAG=1
-fi
-
-if [ $SERIAL_FLAG -eq 1 ]; then
-    echo "build-srpms-serial $@"
-    build-srpms-serial "$@"
-else
-    echo "build-srpms-parallel $@"
-    build-srpms-parallel "$@"
-fi
-
diff --git a/build-tools/build-srpms-common.sh b/build-tools/build-srpms-common.sh
deleted file mode 100644
index 02756b36..00000000
--- a/build-tools/build-srpms-common.sh
+++ /dev/null
@@ -1,106 +0,0 @@
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Functions common to build-srpm-serial and build-srpm-parallel.
-#
-
-SRC_BUILD_TYPE_SRPM="srpm"
-SRC_BUILD_TYPE_SPEC="spec"
-SRC_BUILD_TYPES="$SRC_BUILD_TYPE_SRPM $SRC_BUILD_TYPE_SPEC"
-
-set_build_info () {
-    local info_file="$MY_WORKSPACE/BUILD_INFO"
-    local layer_prefix="${LAYER^^}_"
-    if [ "${LAYER}" == "" ]; then
-        layer_prefix=""
-    fi
-    mkdir -p "$(dirname ${info_file})"
-    echo "${layer_prefix}OS=\"centos\"" > "${info_file}"
-    echo "${layer_prefix}JOB=\"n/a\"" >> "${info_file}"
-    echo "${layer_prefix}BUILD_BY=\"${USER}\"" >> "${info_file}"
-    echo "${layer_prefix}BUILD_NUMBER=\"n/a\"" >> "${info_file}"
-    echo "${layer_prefix}BUILD_HOST=\"$(hostname)\"" >> "${info_file}"
-    echo "${layer_prefix}BUILD_DATE=\"$(date '+%Y-%m-%d %H:%M:%S %z')\"" >> "${info_file}"
-}
-
-
-str_lst_contains() {
-    TARGET="$1"
-    LST="$2"
-
-    if [[ $LST =~ (^|[[:space:]])$TARGET($|[[:space:]]) ]] ; then
-        return 0
-    else
-        return 1
-    fi
-}
-
-
-#
-# md5sums_from_input_vars <src-build-type> <srpm-or-spec-path> <work-dir>
-#
-# Returns md5 data for all input files of a src.rpm.
-# Assumes PKG_BASE, ORIG_SRPM_PATH have been defined and the
-# build_srpm.data file has already been sourced.
-#
-# Arguments:
-#   src-build-type: Any single value from $SRC_BUILD_TYPES.
-#                   e.g. 'srpm' or 'spec'
-#   srpm-or-spec-path: Absolute path to an src.rpm, or to a
-#                      spec file.
-#   work-dir: Optional working directory.  If a path is
-#             specified but does not exist, it will be created.
-#
-# Returns: output of md5sum command with canonical path names
-#
-md5sums_from_input_vars () {
-    local SRC_BUILD_TYPE="$1"
-    local SRPM_OR_SPEC_PATH="$2"
-    local WORK_DIR="$3"
-
-    local TMP_FLAG=0
-    local LINK_FILTER='[/]stx[/]downloads[/]'
-
-    if ! str_lst_contains "$SRC_BUILD_TYPE" "$SRC_BUILD_TYPES" ; then
-        >&2  echo "ERROR: $FUNCNAME (${LINENO}): invalid arg: SRC_BUILD_TYPE='$SRC_BUILD_TYPE'"
-        return 1
-    fi
-
-    if [ -z $WORK_DIR ]; then
-        WORK_DIR=$(mktemp -d /tmp/${FUNCNAME}_XXXXXX)
-        if [ $? -ne 0 ]; then
-            >&2  echo "ERROR: $FUNCNAME (${LINENO}): mktemp -d /tmp/${FUNCNAME}_XXXXXX"
-            return 1
-        fi
-        TMP_FLAG=1
-    else
-        mkdir -p "$WORK_DIR"
-        if [ $? -ne 0 ]; then
-            >&2  echo "ERROR: $FUNCNAME (${LINENO}): mkdir -p '$WORK_DIR'"
-            return 1
-        fi
-    fi
-
-    local INPUT_FILES_SORTED="$WORK_DIR/srpm_sorted_input.files"
-
-    # Create lists of input files (INPUT_FILES) and symlinks (INPUT_LINKS).
-    srpm_source_file_list "$SRC_BUILD_TYPE" "$SRPM_OR_SPEC_PATH" "$INPUT_FILES_SORTED"
-    if [ $? -eq 1 ]; then
-        return 1
-    fi
-
-    # Remove $MY_REPO prefix from paths
-    cat $INPUT_FILES_SORTED | xargs -d '\n'  md5sum | sed "s# $(readlink -f $MY_REPO)/# #"
-
-    if [ $TMP_FLAG -eq 0 ]; then
-        \rm -f $INPUT_FILES_SORTED
-    else
-        \rm -rf $WORK_DIR
-    fi
-
-    return 0
-}
diff --git a/build-tools/build-srpms-parallel b/build-tools/build-srpms-parallel
deleted file mode 100755
index f8e59e01..00000000
--- a/build-tools/build-srpms-parallel
+++ /dev/null
@@ -1,1605 +0,0 @@
-#!/bin/bash
-# set -x
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Create src.rpm files from source, or from a downloaded tarball
-# or src.rpm plus our additional patches.
-#
-# This version tries to compile many packages in parallel.
-#
-# The location of packages to be build are identified by
-# <distro>_pkg_dirs[_<opt-build-type>] files located at the root of
-# any git tree (e.g. stx/integ/centos_pkg_dirs).
-#
-# The build of an individul package is driven by it's build_srpm.data
-# file plus a <pkg-name>.spec file or an srpm_path file.
-#
-
-export ME=$(basename "$0")
-CMDLINE="$ME $@"
-
-BUILD_SRPMS_PARALLEL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-source $BUILD_SRPMS_PARALLEL_DIR/git-utils.sh
-source $BUILD_SRPMS_PARALLEL_DIR/spec-utils
-source $BUILD_SRPMS_PARALLEL_DIR/srpm-utils
-source $BUILD_SRPMS_PARALLEL_DIR/classify
-source $BUILD_SRPMS_PARALLEL_DIR/build-srpms-common.sh
-source $BUILD_SRPMS_PARALLEL_DIR/image-utils.sh
-
-
-INITIAL_DIR=$(pwd)
-export DISTRO="centos"
-SRPM_SCRIPT="build_srpm"
-SRPM_DATA="build_srpm.data"
-PKG_DIRS_FILE="${DISTRO}_pkg_dirs"
-
-DEFAULT_SRPM_SCRIPT="$BUILD_SRPMS_PARALLEL_DIR/default_$SRPM_SCRIPT"
-SCRIPT_PATH="$DISTRO"
-DATA_PATH="$DISTRO"
-FILES_PATH="$DISTRO/files"
-PATCHES_PATH="$DISTRO/patches"
-ORIG_SPECS_PATH="$DISTRO"
-SRPM_LIST_PATH="$DISTRO/srpm_path"
-
-MIRROR_ROOT="$MY_REPO/${DISTRO}-repo"
-if [ ! -d ${MIRROR_ROOT} ]; then
-    # Old value... a temporary measure for backward compatibility
-    MIRROR_ROOT="$MY_REPO/cgcs-${DISTRO}-repo"
-    if [ ! -d ${MIRROR_ROOT} ]; then
-        MIRROR_ROOT="$MY_REPO/${DISTRO}-repo"
-    fi
-fi
-
-REPO_DOWNLOADS_ROOT="$MY_REPO"
-SRPM_REBUILT_LIST=""
-SRPM_FAILED_REBUILD_LIST=""
-
-STOP_SCHEDULING=0
-
-ABSOLUTE_MAX_WORKERS=8
-MAX_WORKERS=$(grep -c ^processor /proc/cpuinfo)
-if [ "$MAX_WORKERS" == "" ] || [ "$MAX_WORKERS" == "0" ]; then
-    MAX_WORKERS=1
-fi
-
-if [ $MAX_WORKERS -gt $ABSOLUTE_MAX_WORKERS ]; then
-    MAX_WORKERS=$ABSOLUTE_MAX_WORKERS
-fi
-      
-echo "MAX_WORKERS=$MAX_WORKERS"
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-   CREATEREPO="createrepo"
-fi
-
-#
-# Create a list of rpms in the directory
-#
-create_lst () {
-   local DIR=${1}
-
-       (cd $DIR
-        [ -f rpm.lst ] && \rm -rf rpm.lst
-        [ -f srpm.lst ] && \rm -rf srpm.lst
-        find . -name '*.rpm' -and -not -name '*.src.rpm' | sed 's#^[.][/]##' | sort > rpm.lst
-        find . -name '*.src.rpm' | sed 's#^[.][/]##' | sort > srpm.lst
-       )
-}
-
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create source rpms:"
-    echo "      $ME [--rt | --std | --installer | --containers] [--layer=<layer>] [--no-descendants] [--formal] [ list of package names ]"
-    echo ""
-    echo "   Delete source rpms, and the directories associated with it's creation:"
-    echo "   Note: does not clean an edit environment"
-    echo "      $ME --clean [--rt | --std | --installer | --containers]  [optional list of package names]"
-    echo ""
-    echo "   Extract an src.rpm into a pair of git trees to aid in editing it's contents,"
-    echo "   one for source code and one for metadata such as the spec file."
-    echo "   If --no-meta-patch is specified, then WRS patches are omitted."
-    echo "      $ME --edit [--rt | --std | --installer | --containers] [--no-meta-patch] [list of package names]"
-    echo ""
-    echo "   Delete an edit environment"
-    echo "      $ME --edit --clean [--rt | --std | --installer | --containers] [list of package names]"
-    echo ""
-    echo "   This help page"
-    echo "      $ME --help"
-    echo ""
-}
-
-
-spec_cache_dir_from_srpm () {
-   local SRPM=${1}
-   local SPEC_DIR=$(echo $SRPM | sed 's#/SRPMS/#/SPECS/#')
-   echo "$SPEC_DIR"
-}
-
-result_dir_from_srpm () {
-   local SRPM=$(basename ${1} | sed 's#.src.rpm$##')
-   local RESULT_DIR="$MY_WORKSPACE/results/$MY_BUILD_ENVIRONMENT/$SRPM"
-   echo "$RESULT_DIR"
-}
-
-
-# This function creates a bunch of subdirs in $MY_WORKSPACE and makes sure
-# that a $MY_BUILD_CFG file exists.
-#
-# The goal of this is to have a script do as much of the annoying
-# grunt-work so that the "how to build it" instructions aren't 200 lines
-create_output_dirs () {
-	# make sure variables are sane before continuing
-	# Note that $BUILD_ROOT contains either $MY_WORKSPACE or $MY_PATCH_WORKSPACE
-	if [ "x$BUILD_ROOT" == "x" ]; then
-		return
-	fi
-	if [ "x$MY_BUILD_CFG" == "x" ]; then
-		return
-	fi
-	if [ "x$MY_BUILD_DIR" == "x" ]; then
-		return
-	fi
-	if [ "x$MY_SRC_RPM_BUILD_DIR" == "x" ]; then
-		return
-	fi
-
-	# create output dirs
-	mkdir -p $MY_BUILD_DIR
-	mkdir -p $MY_SRC_RPM_BUILD_DIR
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/SOURCES
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/SPECS
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/BUILD
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/RPMS
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/SRPMS
-
-	# create $MY_BUILD_CFG, if required
-	if [ ! -f $MY_BUILD_CFG ]; then
-           echo "FORMAL_BUILD=$FORMAL_BUILD"
-           echo "modify-build-cfg $MY_BUILD_CFG"
-           ${DIR}/modify-build-cfg $MY_BUILD_CFG
-           if [ $? -ne 0 ]; then
-               echo "Could not modifiy $MY_BUILD_CFG";
-               exit 1
-           fi
-	fi
-
-}
-
-NO_DESCENDANTS=0
-NO_BUILD_INFO=0
-HELP=0
-CLEAN_FLAG=0
-FORMAL_FLAG=0
-BUILD_TYPE_FLAG=0
-EDIT_FLAG=0
-NO_META_PATCH_FLAG=0
-
-# read the options
-TEMP=$(getopt -o h --long parallel,std,rt,installer,containers,no-descendants,no-meta-patch,no-build-info,help,formal,clean,edit,layer: -n "$ME" -- "$@")
-
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-export BUILD_TYPE=std
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        --no-descendants) NO_DESCENDANTS=1 ; shift ;;
-        --no-build-info) NO_BUILD_INFO=1 ; shift ;;
-        -h|--help) HELP=1 ; shift ;;
-        --clean) CLEAN_FLAG=1 ; shift ;;
-        --formal) FORMAL_FLAG=1 ; shift ;;
-        --std) BUILD_TYPE_FLAG=1; BUILD_TYPE=std; shift ;;
-        --rt) BUILD_TYPE_FLAG=1; BUILD_TYPE=rt; shift ;;
-        --installer) BUILD_TYPE=installer; shift ;;
-        --containers) BUILD_TYPE=containers; shift ;;
-        --edit) EDIT_FLAG=1 ; shift ;;
-        --no-meta-patch) NO_META_PATCH_FLAG=1 ; shift ;;
-        --parallel) shift ;;
-        --layer) export LAYER=$2 ; shift ; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error!" ; exit 1 ;;
-    esac
-done
-
-# Reset variables
-if [ -n "$MY_WORKSPACE" ]; then
-   export MY_WORKSPACE_TOP=${MY_WORKSPACE_TOP:-$MY_WORKSPACE}
-   export MY_WORKSPACE=$MY_WORKSPACE_TOP/$BUILD_TYPE
-else
-   export MY_PATCH_WORKSPACE_TOP=${MY_PATCH_WORKSPACE_TOP:-$MY_PATCH_WORKSPACE}
-   export MY_PATCH_WORKSPACE=$MY_PATCH_WORKSPACE_TOP/$BUILD_TYPE
-fi
-
-export MY_BUILD_DIR_TOP=${MY_BUILD_DIR_TOP:-$MY_BUILD_DIR}
-export MY_BUILD_DIR=$MY_BUILD_DIR_TOP/$BUILD_TYPE
-
-export MY_BUILD_ENVIRONMENT_TOP=${MY_BUILD_ENVIRONMENT_TOP:-$MY_BUILD_ENVIRONMENT}
-export MY_BUILD_ENVIRONMENT=$MY_BUILD_ENVIRONMENT_TOP-$BUILD_TYPE
-
-export MY_BUILD_ENVIRONMENT_FILE=$MY_BUILD_ENVIRONMENT.cfg
-export MY_SRC_RPM_BUILD_DIR=$MY_BUILD_DIR/rpmbuild
-export MY_BUILD_CFG=$MY_WORKSPACE/$MY_BUILD_ENVIRONMENT_FILE
-export MY_MOCK_ROOT=$MY_WORKSPACE/mock/root
-
-if [ "$BUILD_TYPE" != "std" ]; then
-   PKG_DIRS_FILE="${DISTRO}_pkg_dirs_${BUILD_TYPE}"
-fi
-
-echo "CLEAN_FLAG=$CLEAN_FLAG"
-TARGETS=$@
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ $FORMAL_FLAG -eq 1 ]; then
-   export FORMAL_BUILD="yes"
-fi
-
-if [ "x$TARGETS" == "x" ] && [ $EDIT_FLAG -eq 1 ]; then
-    echo "ERROR: $FUNCNAME (${LINENO}): a package name is required when --edit is specified"
-    usage
-    exit 0
-fi
-
-SRC_ROOT="$MY_REPO"
-if [ "x$MY_REPO" == "x" ]; then
-   SRC_ROOT=$INITIAL_DIR
-fi
-
-BUILD_ROOT="$MY_WORKSPACE"
-if [ "x$MY_WORKSPACE" == "x" ]; then
-   BUILD_ROOT="$MY_PATCH_WORKSPACE"
-
-   if [ "x$MY_PATCH_WORKSPACE" == "x" ]; then
-       echo "ERROR: $FUNCNAME (${LINENO}): require one of MY_WORKSPACE or MY_PATCH_WORKSPACE be defined"
-       exit 1
-   fi
-fi
-
-export CCACHE_DIR="$BUILD_ROOT/.ccache"
-export SRC_BASE="$SRC_ROOT"
-export STX_BASE="$SRC_BASE/stx"
-export CGCS_BASE="$STX_BASE"
-export DISTRO_REPO_BASE=$MIRROR_ROOT
-export SPECS_BASE="$ORIG_SPECS_PATH"
-export FILES_BASE="$FILES_PATH"
-export PATCHES_BASE="$PATCHES_PATH"
-
-export BUILD_BASE="$BUILD_ROOT"
-BUILD_INPUTS="$BUILD_BASE/inputs"
-SRPM_ASSEMBLE="$BUILD_BASE/srpm_assemble"
-SRPM_WORK="$BUILD_BASE/srpm_work"
-
-if [ "x$MY_SRC_RPM_BUILD_DIR" != "x" ]; then
-    RPM_BUILD_ROOT=$MY_SRC_RPM_BUILD_DIR
-else
-    RPM_BUILD_ROOT=$BUILD_BASE/rpmbuild
-fi
-
-create_output_dirs
-
-export RPM_BUILD_BASE="$RPM_BUILD_ROOT"
-export SRPM_OUT="$RPM_BUILD_BASE/SRPMS"
-export SOURCE_OUT="$RPM_BUILD_BASE/SOURCES"
-export RPM_DIR="$RPM_BUILD_BASE/RPMS"
-
-if [ ! -d $CGCS_BASE ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): expected to find directory at '$CGCS_BASE'"
-   exit 1
-fi
-
-if [ ! -d $BUILD_BASE ]; then
-   if [ $CLEAN_FLAG -eq 1 ]; then
-       exit 0
-   fi
-   echo "ERROR: $FUNCNAME (${LINENO}): expected to find directory at '$BUILD_BASE'"
-   exit 1
-fi
-
-RELEASE_INFO_FILE="$(get_release_info)"
-
-if [ -f "$RELEASE_INFO_FILE" ]; then
-   source "$RELEASE_INFO_FILE"
-else
-   echo "Warning: $FUNCNAME (${LINENO}): failed to find RELEASE_INFO_FILE=$RELEASE_INFO_FILE"
-fi
-
-if [ "x$PLATFORM_RELEASE" == "x" ]; then
-   echo "Warning: $FUNCNAME (${LINENO}): PLATFORM_RELEASE is not defined in $RELEASE_INFO_FILE"
-   PLATFORM_RELEASE=00.00
-fi
-
-export PLATFORM_RELEASE
-
-mkdir -p $RPM_BUILD_BASE
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$RPM_BUILD_BASE'"
-   exit 1
-fi
-
-mkdir -p $SRPM_OUT
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$SRPM_OUT'"
-   exit 1
-fi
-
-mkdir -p $RPM_DIR
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$RPM_DIR'"
-   exit 1
-fi
-
-mkdir -p $SRPM_ASSEMBLE
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$SRPM_ASSEMBLE'"
-   exit 1
-fi
-
-mkdir -p $BUILD_INPUTS
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$BUILD_INPUTS'"
-   exit 1
-fi
-
-build_dir () {
-   local build_idx=$1
-   local d=$2
-   local w=$3
-   export PKG_BASE=$d
-   export WORK_BASE=$w
-   export SPECS_BASE="$PKG_BASE/$ORIG_SPECS_PATH"
-   local RC
-
-   local ORIG_DIR=$(pwd)
-   # echo "build_dir: PKG_BASE=$PKG_BASE"
-
-   cd "$PKG_BASE"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): failed to cd into '$PKG_BASE'"
-      return 1
-   fi
-
-   if [ ! -d $ORIG_SPECS_PATH ]; then
-      # nothing to do
-      echo "WARNING: '$ORIG_SPECS_PATH' not found in '$PKG_BASE'"
-      cd "$ORIG_DIR"
-      return 0
-   fi
-
-   SRPM_COUNT=0
-   ORIG_SRPM_PATH=""
-   if [ -f $SRPM_LIST_PATH ]; then
-      # we've found a file (ex centos/srpm_path) which lists a path to a source
-      # RPM file
-      #
-      # The specified file can be of the form
-      #
-      # repo:path/to/file.src.rpm
-      # mirror:path/to/file.src.rpm
-      # /path/to/file.rpm
-      # path/to/file.rpm
-      #
-      # If "repo:" is specified, then we search for the file relative to
-      # $REPO_DOWNLOADS_ROOT (i.e. a path to the file in a "downloads subgit)
-      #
-      # If "mirror:" is specified, then we search for the file relateive to
-      # $MIRROR_ROOT 
-      #
-      # An absolute path is parsed as an absolute path (mainly intended for
-      # developer/experimental use without checking in files or messing with
-      # your git repos)
-      #
-      # A lack of prefix (relative path name) is interpretted as "mirror:"
-      # (legacy support for existing packages)
-      #
-      # Other prefixes (file:, http:, whatever:)are unsupported at this time
-
-      for p in $(grep -v '^#' $SRPM_LIST_PATH | grep -v '^$'); do
-         # absolute path source rpms
-         echo "$p" | grep "^/" >/dev/null && ORIG_SRPM_PATH=$p
-
-         if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-            # handle repo: definitions
-            echo "$p" | grep "^repo:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^repo:%$REPO_DOWNLOADS_ROOT/%")
-         fi
-
-         if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-            # handle mirror: definitions
-            echo "$p" | grep "^mirror:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^mirror:%$MIRROR_ROOT/%" |  sed "s#CentOS/tis-r3-CentOS/kilo/##" | sed "s#CentOS/tis-r3-CentOS/mitaka/##")
-         fi
-
-         if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-            # we haven't found a valid prefix yet, so assume it's a legacy
-            # file (mirror: interpretation)
-            ORIG_SRPM_PATH="$MIRROR_ROOT/$p"
-         fi
-
-         # echo "ORIG_SRPM_PATH=$ORIG_SRPM_PATH"
-         if [ -f $ORIG_SRPM_PATH ]; then
-             SRPM_COUNT=$((SRPM_COUNT + 1))
-         else
-             echo "ERROR: $FUNCNAME (${LINENO}): Invalid srpm path '$p', evaluated as '$ORIG_SRPM_PATH', found in '$PKG_BASE/$SRPM_LIST_PATH'"
-             ORIG_SRPM_PATH=""
-             return 3
-         fi
-      done
-   fi
-
-   # Clean up an tmp_spec_*.spec file left by a prior failed build
-   for f in $(find $ORIG_SPECS_PATH -name 'tmp_spec_*.spec'); do 
-      \rm -f $f
-   done
-
-   SPEC_COUNT=$(find $ORIG_SPECS_PATH -name '*.spec' | wc -l)
-   if [ $SPEC_COUNT -eq 0 ]; then
-      if [ -f $ORIG_SPECS_PATH/spec_path ]; then
-         SPECS_BASE=$SRC_BASE/$(cat $SPECS_BASE/spec_path)
-         SPEC_COUNT=$(find $SPECS_BASE -maxdepth 1 -name '*.spec' | wc -l)
-      fi
-   fi
-
-   if [ $SPEC_COUNT -eq 0 ] && [ $SRPM_COUNT -eq 0 ]; then
-      # nothing to do
-      echo "ERROR: $FUNCNAME (${LINENO}): Neither srpm_path nor .spec file not found in '$PKG_BASE/$ORIG_SPECS_PATH'"
-      cd "$ORIG_DIR"
-      return 0
-   fi
-
-
-   if [ $SPEC_COUNT -gt 0 ] && [ $SRPM_COUNT -gt 0 ]; then
-      # nothing to do
-      echo "ERROR: $FUNCNAME (${LINENO}): Please provide only one of srpm_path or .spec files, not both, in '$PKG_BASE/$ORIG_SPECS_PATH'"
-      cd $ORIG_DIR
-      return 0
-   fi
-
-   if [  $SPEC_COUNT -gt 0 ]; then
-      build_dir_spec $build_idx
-      RC=$?
-      cd "$ORIG_DIR"
-      return $RC
-   else
-      build_dir_srpm $build_idx $ORIG_SRPM_PATH
-      RC=$?
-      cd "$ORIG_DIR"
-      return $RC
-   fi
-
-   cd "$ORIG_DIR"
-   return 0
-}
-
-
-clean_srpm_dir () {
-   local build_idx=$1
-   local DIR=$2
-   local EXCLUDE_MD5=$3
-
-   local SRPM_PATH
-   local SRPM_FILE
-   local SRPM_OUT_PATH
-   local SRPM_NAME
-   local SRPM_OUT_NAME
-   local INPUTS_TO_CLEAN=""
-
-   if [ "$EXCLUDE_MD5" == "" ]; then
-       EXCLUDE_MD5=0
-   fi
-
-   echo "clean_srpm_dir build_idx=$build_idx DIR=$DIR"
-
-   INPUTS_TO_CLEAN=$(dirname $(dirname $DIR))
-   echo "$INPUTS_TO_CLEAN" | grep -q "^$BUILD_INPUTS/"
-   if [ $? -ne 0 ] ; then
-       INPUTS_TO_CLEAN=""
-   fi
-
-   for SRPM_PATH in $(find "$DIR" -name '*.src.rpm'); do
-       SRPM_FILE=$(basename $SRPM_PATH)
-       SRPM_NAME=$(rpm -q --queryformat '%{NAME}\n' --nosignature -p $SRPM_PATH 2>> /dev/null)
-
-       if [ $CLEAN_FLAG -eq 1 ]; then
-         sed -i "/^$SRPM_NAME$/d" $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_${build_idx}
-       fi
-
-       \rm -fv $SRPM_PATH $SRPM_OUT/$SRPM_FILE  
-
-       if [ -d $SRPM_ASSEMBLE/$SRPM_NAME ]; then
-           echo "rm -rf $SRPM_ASSEMBLE/$SRPM_NAME"
-           \rm -rf $SRPM_ASSEMBLE/$SRPM_NAME
-       fi
-
-       if [ -d $SOURCE_OUT/$SRPM_FILE ]; then
-           echo "rm -rf $SOURCE_OUT/$SRPM_FILE"
-           \rm -rf $SOURCE_OUT/$SRPM_FILE
-       fi
-
-       if [ $EXCLUDE_MD5 -eq 0 ] && [ -d $SOURCE_OUT/$SRPM_NAME ]; then
-           echo "rm -rf $SOURCE_OUT/$SRPM_NAME"
-           \rm -rf $SOURCE_OUT/$SRPM_NAME
-       fi
-
-       local d
-       local src_d
-       local spec
-       local spec_name
-
-       for d in $(find $BUILD_INPUTS -type d -name "${SRPM_NAME}*") ;do
-           src_d=$(echo $d | sed "s#^$BUILD_INPUTS/#$MY_REPO/#")
-
-           for spec in $(find $src_d/${DISTRO} -name '*.spec'); do
-               spec_name=$(spec_find_tag Name $spec)
-               if [ "$spec_name" == "$SRPM_NAME" ]; then
-                   INPUTS_TO_CLEAN=$(if [ "x$INPUTS_TO_CLEAN" != "x" ]; then echo $INPUTS_TO_CLEAN; fi; echo "$d")
-               fi
-           done
-       done
-
-       # Look for older versions of the same src rpm that also need cleaning
-       for SRPM_OUT_PATH in $(ls -1 $SRPM_OUT/$SRPM_NAME*.src.rpm 2>> /dev/null); do
-           SRPM_OUT_FILE=$(basename $SRPM_OUT_PATH)
-           SRPM_OUT_NAME=$(rpm -q --queryformat '%{NAME}\n' -p $SRPM_OUT_PATH 2>> /dev/null)
-           if [ "$SRPM_NAME" == "$SRPM_OUT_NAME" ]; then
-              \rm -fv $SRPM_OUT_PATH
-              if [ -d $SOURCE_OUT/$SRPM_OUT_FILE ]; then
-                  echo "rm -rf $SOURCE_OUT/$SRPM_OUT_FILE"
-                  \rm -rf $SOURCE_OUT/$SRPM_OUT_FILE
-              fi
-           fi
-       done
-   done
-
-   if [ "x$INPUTS_TO_CLEAN" != "x" ]; then
-       for d in $INPUTS_TO_CLEAN; do
-           if [ -d $d/rpmbuild ]; then
-               echo "rm -rf $d"
-               \rm -rf $d
-           fi
-       done
-   fi
-}
-
-build_dir_srpm () {
-   local build_idx=$1
-   local ORIG_SRPM_PATH=$2
-
-   local ORIG_SRPM=$(basename $ORIG_SRPM_PATH)
-   local NAME=$(rpm -q --queryformat '%{NAME}\n' --nosignature -p $ORIG_SRPM_PATH)
-   local PKG_NAME_VER=$(rpm -q --queryformat '%{NAME}-%{VERSION}-%{RELEASE}\n' --nosignature -p $ORIG_SRPM_PATH)
-   local PKG_DIR="$NAME"
-   local TARGET_FOUND=""
-   local RC=0
-
-   export SRPM_EXPORT_NAME=$NAME
-   export SRPM_EXPORT_VER=$VER
-
-   local NEED_BUILD=0
-
-   if [ "x$TARGETS" == "x" ]; then
-      NEED_BUILD=1
-      TARGET_FOUND=$NAME
-   else
-      TARGET_LIST=( $TARGETS )
-      TARGET_FOUND=$(srpm_match_target_list TARGET_LIST  "$ORIG_SRPM_PATH" 2>> /dev/null)
-      if [ $? -eq 0 ]; then
-         echo "found target '$TARGET_FOUND' in '$ORIG_SRPM'"
-         NEED_BUILD=1
-         sed -i "/^$TARGET_FOUND$/d" $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_${build_idx}
-      fi
-   fi
-
-   if [ $NEED_BUILD -eq 0 ]; then
-      return 0
-   fi
-
-   local ROOT_DIR="$SRPM_ASSEMBLE"   
-   if [ $EDIT_FLAG -eq 1 ]; then
-      mkdir -p $SRPM_WORK
-      ROOT_DIR="$SRPM_WORK"
-   fi
-   local PKG_ROOT_DIR="$ROOT_DIR/$PKG_DIR"   
-   local BUILD_DIR="$PKG_DIR/rpmbuild"
-   local FULL_BUILD_DIR="$ROOT_DIR/$BUILD_DIR"
-   local SRPM_DIR="$FULL_BUILD_DIR/SRPMS"
-   local SOURCES_DIR="$SOURCE_OUT"
-   
-   if [ $CLEAN_FLAG -eq 1 ]; then
-      # clean
-      echo "===== Cleaning '$TARGET_FOUND' ====="
-
-      if [ -d $SRPM_DIR ] && [ $EDIT_FLAG -eq 0 ]; then
-         clean_srpm_dir $build_idx "$SRPM_DIR" 0
-      fi
-
-      if [ -d $PKG_ROOT_DIR ]; then
-          echo "rm -rf $PKG_ROOT_DIR"
-          \rm -rf "$PKG_ROOT_DIR"
-      fi
-   else
-      #build
-      echo "===== Build SRPM for '$TARGET_FOUND' ====="
-      echo "PKG_BASE=$PKG_BASE"
-      echo "BUILD_DIR=$BUILD_DIR"
-      echo "SRPM_DIR=$SRPM_DIR"
-
-      if [ ! -d $ROOT_DIR ]; then
-         mkdir -p "$ROOT_DIR"
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): mkdir '$ROOT_DIR' failed"
-            return 1
-         fi
-      fi
-
-      #
-      # Load data from build_srpm.data
-      #
-      export DATA="$DATA_PATH/$SRPM_DATA"
-
-      if [ -f "$DATA" ]; then
-          srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE_SRPM" "$ORIG_SRPM_PATH"
-          if [ $? -ne 0 ]; then
-              echo "ERROR: $FUNCNAME (${LINENO}): failed to source $DATA"
-              return 1
-          fi
-      fi
-
-      #
-      # Capture md5 data for all input files
-      #
-      local TARGET_SOURCES_DIR="$SOURCES_DIR/$TARGET_FOUND"
-      local INPUT_FILES_MD5="$TARGET_SOURCES_DIR/srpm_input.md5"
-      local REFERENCE_MD5="$TARGET_SOURCES_DIR/srpm_reference.md5"
-
-      mkdir -p "$TARGET_SOURCES_DIR"
-      md5sums_from_input_vars "$SRC_BUILD_TYPE_SRPM" "$ORIG_SRPM_PATH" "$TARGET_SOURCES_DIR" > "$INPUT_FILES_MD5"
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): md5sums_from_input_vars '$SRC_BUILD_TYPE_SRPM' '$ORIG_SRPM_PATH' '$TARGET_SOURCES_DIR'"
-         return 1
-      fi
-      echo "Wrote: $INPUT_FILES_MD5"
-
-      #
-      # Is a rebuild required?
-      # Compare md5 of current inputs vs md5 of previous build?
-      #
-      local BUILD_NEEDED=0
-      local SRPM_OUT_PATH2
-      local DIFF_LINE
-      local DIFF_FILE
-
-      if [ -f $REFERENCE_MD5 ]; then
-         DIFF_LINE=$(diff "$INPUT_FILES_MD5" "$REFERENCE_MD5" | head -n 2 | tail -n 1; exit ${PIPESTATUS[0]})
-         if [ $? -ne 0 ]; then
-            DIFF_FILE=$(echo "$DIFF_LINE" | cut -d ' ' -f4-)
-            BUILD_NEEDED=1
-            case ${DIFF_LINE:0:1} in
-               '>') echo "Rebuild required due to deleted file: $DIFF_FILE" ;;
-               '<') echo "Rebuild required due to new or changed file: $DIFF_FILE" ;;
-               *)  echo "Rebuild required due to diff: $DIFF_LINE" ;;
-            esac
-         fi
-      else
-         echo "Rebuild required due to missing reference md5: $REFERENCE_MD5"
-         BUILD_NEEDED=1
-      fi
-
-      if [ -d "$FULL_BUILD_DIR/SRPMS" ]; then
-         b=""
-         for SRPM_PATH in $(find "$FULL_BUILD_DIR/SRPMS" -name '*.src.rpm' | sort -V); do
-            b=$(basename $SRPM_PATH)
-            SRPM_OUT_PATH2=$(find $SRPM_OUT -name $b)
-            if [ "x$SRPM_OUT_PATH2" == "x" ]; then
-               echo "Rebuild required due to missing srpm: $b"
-               BUILD_NEEDED=1
-            fi
-         done
-
-         if [ "$b" == "" ]; then
-            echo "Rebuild required due no src.rpm in directory: '$FULL_BUILD_DIR/SRPMS'"
-            BUILD_NEEDED=1
-         fi
-      else
-         echo "Rebuild required due to missing directory: '$FULL_BUILD_DIR/SRPMS'"
-         BUILD_NEEDED=1
-      fi
-   
-      if [ $BUILD_NEEDED -eq 0 ]; then
-         echo "SRPM build not required for '$PKG_BASE'"
-         echo "===== Build complete for '$TARGET_FOUND' ====="
-         echo
-         return 0
-      fi
-   
-      if [ $EDIT_FLAG -eq 0 ]; then
-         clean_srpm_dir $build_idx "$FULL_BUILD_DIR/SRPMS" 1
-
-         if [ -d $PKG_ROOT_DIR ]; then
-            echo "arf rm -rf $PKG_ROOT_DIR"
-            \rm -rf $PKG_ROOT_DIR
-         fi
-      fi
-
-      if [ $EDIT_FLAG -eq 1 ]; then
-         PKG_CLASSIFICATION=$(classify $PKG_BASE)
-         echo "$PKG_CLASSIFICATION = classify $PKG_BASE"
-         if [ "$PKG_CLASSIFICATION" == "spec + tarball" ] || [ "$PKG_CLASSIFICATION" == "srpm + patches" ]; then
-            echo "OK to edit $PKG_BASE"
-         else
-            echo "Can't edit this package, it is of type '$PKG_CLASSIFICATION', it is not derived from SRPM or tarball and patches"
-            return 1
-         fi
-
-         echo "srpm_extract_to_git '$ORIG_SRPM_PATH' '$PKG_BASE' '$ROOT_DIR' '$BUILD_DIR' '$PKG_NAME_VER' '$NO_META_PATCH_FLAG' '$TIS_PATCH_VER' '$PBR_VERSION'"
-         srpm_extract_to_git $ORIG_SRPM_PATH $PKG_BASE $ROOT_DIR $BUILD_DIR $PKG_NAME_VER $NO_META_PATCH_FLAG $TIS_PATCH_VER $PBR_VERSION
-         RC=$?
-         if [ $RC -ne 0 ]; then
-            if [ $RC -eq 1 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): failed to extract srpm '$ORIG_SRPM_PATH'"
-            fi
-            return $RC
-         fi
-
-         local LOC
-         LOC=$(git_list_containing_tag "${PKG_ROOT_DIR}/gits" "pre_wrs_$PKG_NAME_VER" | head -n 1 )
-         echo "===== '$TARGET_FOUND' has been extracted for editing. ====="
-         echo "===== Metadata can be found at: $PKG_ROOT_DIR/rpmbuild"
-         echo "===== Source code can be found at: $LOC"
-         return 0
-      fi
-
-      #
-      # Find age of youngest input file.
-      # We will apply this as the creation/modification timestamp of the src.rpm we produce.
-      #
-      AGE=$(find $PKG_BASE $ORIG_SRPM_PATH ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-      if [ -f $PKG_BASE/$DATA ]; then
-         AGE2=$(
-               cd $PKG_BASE
-               srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE_SRPM" "$ORIG_SRPM_PATH"
-               PATH_LIST=""
-
-               # NOTE: SRC_DIR is not honored in this build path
-      
-               if [ "x$COPY_LIST" != "x" ]; then
-                  PATH_LIST="$PATH_LIST $COPY_LIST"
-               fi
-      
-               # NOTE: COPY_LIST_TO_TAR is not honored in this build path
-
-      
-               if [ "x$PATH_LIST" == "x" ]; then
-                  echo "0"
-               else
-                  AGE2=$(find -L $PATH_LIST ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-                  echo  "$AGE2"
-               fi
-               )
-          if [ $AGE2 -gt $AGE ]; then
-             AGE=$AGE2
-          fi
-      fi
-      
-      srpm_extract $ORIG_SRPM_PATH $PKG_BASE $ROOT_DIR $BUILD_DIR $PKG_NAME_VER
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): failed to extract srpm '$ORIG_SRPM_PATH'"
-         return 1
-      fi
-
-      if [ "x$COPY_LIST" != "x" ]; then
-         echo "COPY_LIST: $COPY_LIST"
-         for p in $COPY_LIST; do
-            # echo "COPY_LIST: $p"
-            \cp -L -r -f -v $p $FULL_BUILD_DIR/SOURCES
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): COPY_LIST: file not found: '$p'"
-               return 1
-            fi
-         done
-      fi
-
-      srpm_assemble $FULL_BUILD_DIR $TIS_PATCH_VER $PBR_VERSION
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): failed to assemble srpm for '$PKG_NAME_VER'"
-
-         echo "$TARGET_FOUND" >> $MY_WORKSPACE/tmp/SRPM_FAILED_REBUILD_LIST_${build_idx}
-         return 1
-      fi
-
-      TS=$(date -d @$AGE +%Y-%m-%dT%H:%M:%S)
-      for s in $(find $FULL_BUILD_DIR/SRPMS -name '*.src.rpm'); do
-         \cp -L -f -v $s $SRPM_OUT/
-         ss=$(basename $s)
-         touch $SRPM_OUT/$ss --date=$TS
-
-         mkdir -p $SOURCES_DIR/$ss
-         BIG_FLAG_FILE="$SOURCES_DIR/$ss/BIG"
-         SLOW_FLAG_FILE="$SOURCES_DIR/$ss/SLOW"
-
-         if [ $BUILD_IS_BIG -gt 0 ]; then
-             echo "$BUILD_IS_BIG" > $BIG_FLAG_FILE
-         else
-             if [ -f $BIG_FLAG_FILE ]; then
-                 \rm -f $BIG_FLAG_FILE
-             fi
-         fi
-
-         if [ $BUILD_IS_SLOW -gt 0 ]; then
-             echo "$BUILD_IS_SLOW" > $SLOW_FLAG_FILE
-         else
-             if [ -f $SLOW_FLAG_FILE ]; then
-                 \rm -f $SLOW_FLAG_FILE
-             fi
-         fi
-
-         \rm -f -v "$REFERENCE_MD5"
-         \mv -v "$INPUT_FILES_MD5" "$REFERENCE_MD5"
-
-         local SPEC_DIR=$(spec_cache_dir_from_srpm $SRPM_OUT/$ss)
-         if [ -d $SPEC_DIR/BUILDS_VR ]; then
-            for f in $(ls -1 $SPEC_DIR/BUILDS_VR); do
-                for r in $(find  $RPM_DIR -name "$f*rpm" 2>> /dev/null); do
-                   \rm -f -v $r
-                done
-            done
-         fi
-
-         local RESULT_DIR=$(result_dir_from_srpm $SRPM_OUT/$ss)
-         if [ -d $RESULT_DIR ]; then
-             echo "rm -rf $RESULT_DIR"
-             \rm -rf $RESULT_DIR
-         fi
-      done
-
-      echo "$TARGET_FOUND" >> $MY_WORKSPACE/tmp/SRPM_REBUILT_LIST_${build_idx}
-      echo "SRPM build successful for '$PKG_NAME_VER'"
-      echo "===== Build complete for '$TARGET_FOUND' ====="
-      echo
-
-   fi
-
-   return 0
-}
-
-
-build_dir_spec () {
-   local build_idx=$1
-
-   local NEED_BUILD=0
-   local TARGET_FOUND=""
-
-   if [ "x$TARGETS" == "x" ]; then
-      NEED_BUILD=1
-      for f in $(find $SPECS_BASE -maxdepth 1 -name '*.spec'); do
-         TARGET_FOUND=$(spec_find_tag Name "$f" 2>> /dev/null)
-         if [ $? -ne 0 ]; then
-             TARGET_FOUND=$(spec_find_global service "$f" 2>> /dev/null)
-             if [ $? -ne 0 ]; then
-                 TARGET_FOUND=""
-             fi
-         fi
-      done
-   else
-      TARGET_LIST=( $TARGETS )
-      for f in $(find $SPECS_BASE -maxdepth 1 -name '*.spec' 2>> /dev/null); do
-         TARGET_FOUND=$(spec_match_target_list TARGET_LIST "$f" 2>> /dev/null)
-         if [ $? -eq 0 ]; then
-            echo "found target '$TARGET_FOUND' in '$f'"
-            NEED_BUILD=1
-            sed -i "/^$TARGET_FOUND$/d" $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_${build_idx}
-            break
-         fi
-      done
-   fi
-
-   if [ $NEED_BUILD -eq 1 ]; then
-      MAKE_SRPM="$SCRIPT_PATH/$SRPM_SCRIPT"
-      export DATA="$DATA_PATH/$SRPM_DATA"
-
-      export RPMBUILD_BASE="$WORK_BASE/rpmbuild"
-      SRPM_PATH="$RPMBUILD_BASE/SRPMS"
-      SPEC_PATH="$RPMBUILD_BASE/SPECS"
-      SOURCES_PATH="$RPMBUILD_BASE/SOURCES"
-      local ROOT_DIR="$RPMBUILD_BASE"
-      local PKG_ROOT_DIR="$RPMBUILD_BASE"
-      local SPEC=$(find $SPECS_BASE -maxdepth 1 -name '*.spec' | head -n 1)
-      local NAME=$(spec_find_tag Name $SPEC)
-      local PKG_NAME_VER=$(spec_name_ver_rel $SPEC)
-      local PKG_DIR="$NAME"
-      local BUILD_DIR="$PKG_DIR/rpmbuild"
-      local FULL_BUILD_DIR="$ROOT_DIR"
-      local SRPM_DIR="$FULL_BUILD_DIR/SRPMS"
-      local SOURCES_DIR="$SOURCE_OUT"
-
-      if [ $EDIT_FLAG -eq 1 ]; then
-         mkdir -p $SRPM_WORK
-         ROOT_DIR="$SRPM_WORK"
-         PKG_ROOT_DIR="$ROOT_DIR/$PKG_DIR"   
-      fi
-
-      if [ $CLEAN_FLAG -eq 1 ]; then
-         # clean
-         echo "===== Cleaning '$TARGET_FOUND' ====="
-         if [ -d $SRPM_PATH ] && [ $EDIT_FLAG -eq 0 ]; then
-            clean_srpm_dir $build_idx $SRPM_PATH 0
-         fi
-
-         if [ -d $PKG_ROOT_DIR ]; then
-             echo "rm -rf $PKG_ROOT_DIR"
-             \rm -rf "$PKG_ROOT_DIR"
-         fi
-      else
-         # build
-         echo "===== Build SRPM for '$TARGET_FOUND' ====="
-         echo "PKG_BASE=$PKG_BASE"
-         echo "WORK_BASE=$WORK_BASE"
-         echo "RPMBUILD_BASE=$RPMBUILD_BASE"
-         if [ ! -x $MAKE_SRPM ]; then
-            if [ ! -f $DATA ]; then
-                echo "expected to find an executable script at '$MAKE_SRPM' or data for the default script at '$DATA'"
-                cd $INITIAL_DIR
-                exit 1
-            else
-                MAKE_SRPM="$DEFAULT_SRPM_SCRIPT"
-            fi
-         fi
-   
-         #
-         # Load data from build_srpm.data
-         #
-
-         srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE_SPEC" "$SPEC"
-         if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): failed to source $DATA"
-             return 1
-         fi
-
-         #
-         # Capture md5 data for all input files
-         #
-         local TARGET_SOURCES_DIR="$SOURCES_DIR/$TARGET_FOUND"
-         local INPUT_FILES_MD5="$TARGET_SOURCES_DIR/srpm_input.md5"
-         local REFERENCE_MD5="$TARGET_SOURCES_DIR/srpm_reference.md5"
-
-         mkdir -p "$TARGET_SOURCES_DIR"
-         md5sums_from_input_vars "$SRC_BUILD_TYPE_SPEC" "$SPEC" "$TARGET_SOURCES_DIR" > "$INPUT_FILES_MD5"
-         if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): md5sums_from_input_vars '$SRC_BUILD_TYPE_SPEC' '$SPEC' '$TARGET_SOURCES_DIR'"
-             return 1
-         fi
-         echo "Wrote: $INPUT_FILES_MD5"
-
-         #
-         # Is a rebuild required?
-         # Compare md5 of current inputs vs md5 of previous build?
-         #
-         local BUILD_NEEDED=0
-         local SRPM_OUT_PATH2
-         local DIFF_LINE
-         local DIFF_FILE
-
-         if [ -f $REFERENCE_MD5 ]; then
-            DIFF_LINE=$(diff "$INPUT_FILES_MD5" "$REFERENCE_MD5" | head -n 2 | tail -n 1; exit ${PIPESTATUS[0]})
-            if [ $? -ne 0 ]; then
-               DIFF_FILE=$(echo "$DIFF_LINE" | cut -d ' ' -f4-)
-               BUILD_NEEDED=1
-               case ${DIFF_LINE:0:1} in
-                  '>') echo "Rebuild required due to deleted file: $DIFF_FILE" ;;
-                  '<') echo "Rebuild required due to new or changed file: $DIFF_FILE" ;;
-                  *)  echo "Rebuild required due to diff: $DIFF_LINE" ;;
-               esac
-            fi
-         else
-            echo "Rebuild required due to missing reference md5: $REFERENCE_MD5"
-            BUILD_NEEDED=1
-         fi
-
-         if [ -d "$FULL_BUILD_DIR/SRPMS" ]; then
-            if [ -d "$RPMBUILD_BASE/SRPMS" ]; then
-               b=""
-               for SRPM_PATH2 in $(find "$RPMBUILD_BASE/SRPMS" -name '*.src.rpm' | sort -V); do
-                  b=$(basename $SRPM_PATH2)
-                  SRPM_OUT_PATH2=$(find $SRPM_OUT -name $b)
-                  if [ "x$SRPM_OUT_PATH2" == "x" ]; then
-                     echo "Rebuild required due to missing srpm: $b"
-                     BUILD_NEEDED=1
-                  fi
-               done
-               if [ "$b" == "" ]; then
-                   echo "Rebuild required due no src.rpm found in directory: '$RPMBUILD_BASE/SRPMS'"
-                   BUILD_NEEDED=1
-               fi
-            else
-               echo "Rebuild required due to missing directory: '$RPMBUILD_BASE/SRPMS'"
-               BUILD_NEEDED=1
-            fi
-         else
-            echo "Rebuild required due to missing directory: '$FULL_BUILD_DIR/SRPMS'"
-            BUILD_NEEDED=1
-         fi
-   
-         if [ $EDIT_FLAG -eq 1 ]; then
-
-            PKG_CLASSIFICATION=$(classify $PKG_BASE)
-            echo "$PKG_CLASSIFICATION = classify $PKG_BASE"
-            if [ "$PKG_CLASSIFICATION" == "spec + tarball" ] || [ "$PKG_CLASSIFICATION" == "srpm + patches" ]; then
-               echo "OK to edit $PKG_BASE"
-            else
-               echo "Can't edit this package, it is of type '$PKG_CLASSIFICATION', it is not derived from SRPM or tarball and patches"
-               return 1
-            fi
-
-            echo "tar_and_spec_extract_to_git '$SPEC' '$PKG_BASE' '$ROOT_DIR' '$BUILD_DIR' '$PKG_NAME_VER' '$NO_META_PATCH_FLAG' '$TIS_PATCH_VER' '$PBR_VERSION'"
-            tar_and_spec_extract_to_git "$SPEC" "$PKG_BASE" "$ROOT_DIR" "$BUILD_DIR" "$PKG_NAME_VER" "$NO_META_PATCH_FLAG" "$TIS_PATCH_VER" "$PBR_VERSION"
-            RC=$?
-            if [ $RC -ne 0 ]; then
-               if [ $RC -eq 1 ]; then
-                  echo "ERROR: $FUNCNAME (${LINENO}): failed to extract srpm '$ORIG_SRPM_PATH'"
-               fi
-               return $RC
-            fi
-   
-            local LOC
-            LOC=$(git_list_containing_branch "${PKG_ROOT_DIR}/gits" "${PKG_NAME_VER}" | head -n 1 )
-            echo "===== '$TARGET_FOUND' has been extracted for editing. ====="
-            echo "===== Metadata can be found at: $PKG_ROOT_DIR/rpmbuild"
-            echo "===== Source code can be found at: $LOC"
-            return 0
-         fi
-
-         if [ $BUILD_NEEDED -eq 0 ]; then
-            echo "SRPM build not required for '$PKG_BASE'"
-            echo "===== Build complete for '$TARGET_FOUND' ====="
-            echo
-            return 0
-         fi
-
-         export SRC_BUILD_TYPE="$SRC_BUILD_TYPE_SPEC"
-         export SRPM_OR_SPEC_PATH="$SPEC"
-
-         echo "MAKE_SRPM=$MAKE_SRPM"
-         echo "DATA=$DATA"
-         echo "SRC_BUILD_TYPE=$SRC_BUILD_TYPE"
-         echo "SRPM_OR_SPEC_PATH=$SRPM_OR_SPEC_PATH"
-   
-         if [ -d "$RPMBUILD_BASE/SRPMS" ]; then
-             clean_srpm_dir $build_idx "$RPMBUILD_BASE/SRPMS" 1
-         fi
-
-         if [ -d $RPMBUILD_BASE ]; then
-             echo "rm -rf $RPMBUILD_BASE"
-             \rm -rf "$RPMBUILD_BASE"
-         fi
-
-         echo "mkdir -p $WORK_BASE $SRPM_PATH $SPEC_PATH $SOURCES_PATH"
-         mkdir -p "$WORK_BASE" && \
-         mkdir -p "$SRPM_PATH" && \
-         mkdir -p "$SPEC_PATH" && \
-         mkdir -p "$SOURCES_PATH" 
-         if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directories under: $WORK_BASE"
-         fi
-
-         \cp -L -f -v $SPECS_BASE/*.spec $SPEC_PATH/
-         if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): Failed to copy spec files from '$SPECS_BASE' to '$SPEC_PATH'"
-         fi
-
-         #
-         # build
-         #
-         $MAKE_SRPM
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): script failed '$MAKE_SRPM'"
-            echo "$TARGET_FOUND" >> $MY_WORKSPACE/tmp/SRPM_FAILED_REBUILD_LIST_${build_idx}
-            exit 1
-         fi
-
-         #
-         # Find age of youngest input file.
-         # We will apply this as the creation/modification timestamp of the src.rpm we produce.
-         #
-         AGE=$(find $PKG_BASE ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-         if [ -f $PKG_BASE/$DATA ]; then
-            AGE2=$(
-                  cd $PKG_BASE
-                  srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE_SPEC" "$SPEC"
-                  PATH_LIST=""
-                  if [ "x$SRC_DIR" != "x" ]; then
-                     if [ -d "$SRC_DIR" ]; then
-                        PATH_LIST="$PATH_LIST $SRC_DIR"
-                     fi
-                  fi
-
-                  if [ "x$COPY_LIST" != "x" ]; then
-                     PATH_LIST="$PATH_LIST $COPY_LIST"
-                  fi
-
-                  if [ "x$COPY_LIST_TO_TAR" != "x" ]; then
-                     PATH_LIST="$PATH_LIST $COPY_LIST_TO_TAR"
-                  fi
-
-                  if [ "x$PATH_LIST" == "x" ]; then
-                     echo "0"
-                  else
-                     AGE2=$(find -L $PATH_LIST ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-                     echo  "$AGE2"
-                  fi
-                  )
-             if [ $AGE2 -gt $AGE ]; then
-                AGE=$AGE2
-             fi
-         fi
-
-         TS=$(date -d @$AGE +%Y-%m-%dT%H:%M:%S)
-         for s in $(find $SRPM_PATH -name '*.src.rpm'); do
-             \cp -L -f $s $SRPM_OUT/
-             ss=$(basename $s)
-             touch $SRPM_OUT/$ss --date=$TS
-
-             mkdir -p $SOURCES_DIR/$ss
-             BIG_FLAG_FILE="$SOURCES_DIR/$ss/BIG"
-             SLOW_FLAG_FILE="$SOURCES_DIR/$ss/SLOW"
-             
-             if [ $BUILD_IS_BIG -gt 0 ]; then
-                 echo $BUILD_IS_BIG >  $BIG_FLAG_FILE
-             else
-                 if [ -f $BIG_FLAG_FILE ]; then
-                     \rm -f $BIG_FLAG_FILE
-                 fi    
-             fi
-
-             if [ $BUILD_IS_SLOW -gt 0 ]; then
-                 echo $BUILD_IS_SLOW > $SLOW_FLAG_FILE
-             else
-                 if [ -f $SLOW_FLAG_FILE ]; then
-                     \rm -f $SLOW_FLAG_FILE
-                 fi
-             fi
-
-             \rm -f -v "$REFERENCE_MD5"
-             \mv -v "$INPUT_FILES_MD5" "$REFERENCE_MD5"
-
-             local SPEC_DIR=$(spec_cache_dir_from_srpm $SRPM_OUT/$ss)
-             if [ -d $SPEC_DIR/BUILDS_VR ]; then
-                for f in $(ls -1 $SPEC_DIR/BUILDS_VR); do
-                    for r in $(find  $RPM_DIR -name "$f*rpm" 2>> /dev/null); do
-                       \rm -f -v $r
-                    done
-                done
-             fi
-
-             local RESULT_DIR=$(result_dir_from_srpm $SRPM_OUT/$ss)
-             if [ -d $RESULT_DIR ]; then
-                 echo "rm -rf $RESULT_DIR"
-                 \rm -rf $RESULT_DIR
-             fi
-         done
-
-         echo "$TARGET_FOUND" >> $MY_WORKSPACE/tmp/SRPM_REBUILT_LIST_${build_idx}
-         echo "===== Build complete for '$TARGET_FOUND' ====="
-         echo
-      fi
-   fi
-
-   return 0
-}
-
-(
-echo "$CMDLINE"
-
-if [ -L $BUILD_ROOT/repo ]; then
-    REPO_DEST=$(readlink $BUILD_ROOT/repo)
-    if [ "$REPO_DEST" != "$SRC_ROOT" ]; then
-        echo "Error: MY_REPO changed since last build"
-        echo "   old path: $REPO_DEST"
-        echo "   new path: $SRC_ROOT"
-        echo "Please run '$ME --clean' if you want to compile from a new source tree"
-        exit 1
-    fi
-fi
-
-if [ ! -L $BUILD_ROOT/repo ]; then
-    ln -s $SRC_ROOT $BUILD_ROOT/repo
-fi
-
-ALL=0
-UNRESOLVED_TARGETS=""
-if [ "x$TARGETS" == "x" ]; then
-    echo "make: all"
-    ALL=1
-else
-    echo "make: $TARGETS"
-    UNRESOLVED_TARGETS="$TARGETS"
-fi
-
-workers=0
-max_workers=$MAX_WORKERS
-declare -A build_env
-
-init_build_env () {
-    local i=0
-    local stop=$((max_workers-1))
-    for i in $(seq 0 $stop); do
-       build_env[$i]='Idle'
-    done
-}
-
-init_build_env
-
-get_idle_build_env () {
-    local i=0
-    local stop=$((max_workers-1))
-    if [ $stop -ge 255 ]; then
-        stop=254
-    fi
-    for i in $(seq 0 $stop); do
-        if [ ${build_env[$i]} == 'Idle' ]; then
-            build_env[$i]='Busy'
-            return $i
-        fi
-    done
-    return 255
-}
-
-set_build_env_pid () {
-    local idx=$1
-    local val=$2
-    build_env[$idx]=$val
-}
-
-release_build_env () {
-    local idx=$1
-    build_env[$idx]='Idle'
-}
-
-reaper ()  {
-    local reaped=0
-    local last_reaped=-1
-    local i=0
-    local stop=$((max_workers-1))
-    local p=0
-    local ret=0
-
-    if [ $stop -ge 255 ]; then
-        stop=254
-    fi
-  
-    while [ $reaped -gt $last_reaped ]; do
-        last_reaped=$reaped
-        for i in $(seq 0 $stop); do
-            p=${build_env[$i]}
-            if [ "$p" == "Idle" ] || [ "$p" == "Busy" ]; then
-                continue
-            fi
-            # echo "test $i $p"
-            kill -0 $p &> /dev/null
-            if [ $? -ne 0 ]; then
-                wait $p
-                ret=$?
-                workers=$((workers-1))
-                reaped=$((reaped+1))
-                release_build_env $i 
-                if [ $ret -ne 0 ]; then
-                   VERB="build"
-
-                   if [ $EDIT_FLAG -eq 1 ]; then
-                      VERB="edit"
-                      if [ $CLEAN_FLAG -eq 1 ]; then
-                         VERB="edit clean"
-                      fi
-                   else
-                      if [ $CLEAN_FLAG -eq 1 ]; then
-                         VERB="clean"
-                      fi
-                   fi
-
-                   sleep 1
-                   echo "ERROR: $FUNCNAME (${LINENO}): Failed to $VERB src.rpm from source at 'b$i'"
-                   cat "$LOG_DIR/$i" >> $LOG_DIR/errors
-                   echo "ERROR: $FUNCNAME (${LINENO}): Failed to $VERB src.rpm from source at 'b$i'" >> $LOG_DIR/errors
-                   echo "" >> $LOG_DIR/errors
-                   STOP_SCHEDULING=1
-               fi
-            fi
-        done
-    done
-    return $reaped
-}
-
-
-# Set up files to collect parallel build results ...
-mkdir -p $MY_WORKSPACE/tmp
-fn="$MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_merge"
-
-if [ -f $fn ]; then
-   \rm -f $fn
-fi
-
-for n in $UNRESOLVED_TARGETS; do
-   echo $n >> $fn;
-done
-
-if [ -f $fn ]; then
-   sort $fn > $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS
-else
-   \rm -f -v $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS
-   touch $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS
-fi
-
-for i in $(seq 0 $((max_workers-1))); do
-   for fn in $MY_WORKSPACE/tmp/SRPM_REBUILT_LIST_$i $MY_WORKSPACE/tmp/SRPM_FAILED_REBUILD_LIST_$i $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_$i; do
-      if [ -f $fn ]; then
-         \rm -f -v $fn
-      fi
-   done
-   \cp $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_$i
-done
-
-# create a build info
-if [ $CLEAN_FLAG -eq 0 ] && [ $EDIT_FLAG -eq 0 ] && [ $NO_BUILD_INFO -eq 0 ]; then
-    set_build_info
-fi
-
-# Build src.rpm's
-LOG_DIR=$(mktemp -d $MY_WORKSPACE/tmp/$USER-$ME-log-XXXXXX)
-if [ "x$LOG_DIR" == "x" ]; then
-    echo "failed to create temporary directory"
-    exit 1;
-fi
-
-for GIT_ROOT in $GIT_LIST; do
-   export GIT_BASE="$GIT_ROOT"
-   if [ $STOP_SCHEDULING -eq 1 ]; then
-       break;
-   fi
-
-   if echo "$GIT_ROOT" | grep do-not-build; then
-       continue
-   fi
-
-   for p in $(sed 's/#.*//' $GIT_ROOT/$PKG_DIRS_FILE 2>> /dev/null); do
-      if [ $STOP_SCHEDULING -eq 1 ]; then
-          break;
-      fi
-      src_dir="$GIT_ROOT/$p"
-      if [ -d $src_dir ]; then
-         if [ -d $src_dir/${DISTRO} ]; then
-            rel_dir=$(echo $src_dir | sed "s:^$SRC_BASE::")
-            work_dir="$BUILD_INPUTS$rel_dir"
-
-            # Free up a worker
-            while [ $workers -ge $max_workers ]; do
-                reaper
-                reaped=$?
-                if [ $reaped -eq 0 ]; then
-                    sleep 0.1
-                fi
-            done
-
-            workers=$((workers+1))
-            get_idle_build_env
-            b=$?
-            if [ $b -ge 255 ]; then
-               echo "get_idle_build_env failed to find a free slot"
-               exit 1
-            fi
-            PREFIX="b$b"
-            ( build_dir $b $src_dir $work_dir 2>&1 | sed "s#^#${PREFIX}: #"  | tee $LOG_DIR/$b; exit ${PIPESTATUS[0]} ) &
-            pp=$!
-            set_build_env_pid $b $pp
-         else
-            echo "ERROR: $FUNCNAME (${LINENO}): Failed to find 'centos' in '$p', found in file '$GIT_ROOT/$PKG_DIRS_FILE'"
-         fi
-      else
-         echo "ERROR: $FUNCNAME (${LINENO}): Bad path '$p' in file '$GIT_ROOT/$PKG_DIRS_FILE'"
-      fi
-   done
-done
-
-# Wait for remaining workers to exit
-while [ $workers -gt 0 ]; do
-    reaper
-    reaped=$?
-    if [ $reaped -eq 0 ]; then
-        sleep 0.1
-    fi
-done
-
-if [ $STOP_SCHEDULING -eq 1 ]; then
-    echo "============ Build failed ============="
-    if [ -f $LOG_DIR/errors ]; then
-        cat $LOG_DIR/errors
-    fi
-    \rm -rf $LOG_DIR
-    exit 1
-fi
-\rm -rf $LOG_DIR
-
-# Transfer results from files back into variables
-SRPM_REBUILT_LIST=$((for i in $(seq 0 $((max_workers-1))); do 
-                        fn=$MY_WORKSPACE/tmp/SRPM_REBUILT_LIST_$i 
-                        if [ -f $fn ]; then 
-                           cat $fn | tr '\n' ' '
-                        fi
-                     done) | sed 's/ $//')
-
-SRPM_FAILED_REBUILD_LIST=$((for i in $(seq 0 $((max_workers-1))); do 
-                               fn=$MY_WORKSPACE/tmp/SRPM_FAILED_REBUILD_LIST_$i
-                               if [ -f $fn ]; then 
-                                  cat $fn | tr '\n' ' '
-                               fi
-                            done) | sed 's/ $//')
-
-UNRESOLVED_TARGETS=$(for i in $(seq 0 $((max_workers-1))); do
-                        if [ -f $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_$i ]; then
-                           comm -1 -2 $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_$i > $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_merge
-                           \mv $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS_merge $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS
-                        fi
-                     done
-                     cat $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS | tr '\n' ' ' | sed 's/ $//')
-
-\rm -rf $MY_WORKSPACE/tmp/SRPM_REBUILT_LIST_* $MY_WORKSPACE/tmp/SRPM_FAILED_REBUILD_LIST_* $MY_WORKSPACE/tmp/UNRESOLVED_TARGETS* 2>> /dev/null
-
-# Try to find and clean orphaned and discontinued .src.rpm's 
-if [ $ALL -eq 1 ]; then
-    echo
-    echo "Auditing for obsolete srpms"
-    AUDIT_DIR=$(mktemp -d $MY_WORKSPACE/tmp/$USER-$ME-audit-XXXXXX)
-    if [ $? -eq 0 ] && [ "x$AUDIT_DIR" != "x" ]; then
-    for GIT_ROOT in $GIT_LIST; do
-        if echo "$GIT_ROOT" | grep -q do-not-build; then
-            continue
-        fi
-
-        for p in $(cat $GIT_ROOT/$PKG_DIRS_FILE 2>> /dev/null); do
-            (
-            src_dir="$GIT_ROOT/$p"
-            if [ -d $src_dir ]; then
-                if [ -d $src_dir/$DISTRO ]; then
-
-                    for f in $(find $src_dir/${DISTRO} -name '*.spec' | sort -V); do
-                        NAME=$(spec_find_tag Name "$f" 2>> /dev/null)
-                        if [ $? -eq 0 ]; then
-                            touch "$AUDIT_DIR/$NAME"
-                        fi
-                    done
-                    if [ -f $src_dir/$SRPM_LIST_PATH ]; then
-
-                        for p in $(grep -v '^#' $src_dir/$SRPM_LIST_PATH | grep -v '^$'); do
-                           ORIG_SRPM_PATH=""
-                           # absolute path source rpms
-                           echo "$p" | grep "^/" >/dev/null && ORIG_SRPM_PATH=$p
-
-                           if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-                              # handle repo: definitions
-                              echo "$p" | grep "^repo:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^repo:%$REPO_DOWNLOADS_ROOT/%")
-                           fi
-
-                           if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-                              # handle mirror: definitions
-                              echo "$p" | grep "^mirror:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^mirror:%$MIRROR_ROOT/%" | sed "s#CentOS/tis-r3-CentOS/kilo/##" | sed "s#CentOS/tis-r3-CentOS/mitaka/##")
-                           fi
-
-                           if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-                              # we haven't found a valid prefix yet, so assume it's a legacy
-                              # file (mirror: interpretation)
-                              ORIG_SRPM_PATH="$MIRROR_ROOT/$p"
-                           fi
-
-                           if [ -f $ORIG_SRPM_PATH ]; then
-                               NAME=$(rpm -q --queryformat '%{NAME}\n' -p $ORIG_SRPM_PATH 2>> /dev/null)
-                               if [ $? -eq 0 ]; then
-                                   touch "$AUDIT_DIR/$NAME"
-                               fi
-                           fi
-                        done
-                    fi
-                fi
-            fi
-            ) &
-        done
-    done
-    echo "waiting"
-    wait
-
-    echo "Auditing for obsolete srpms Phase 2"
-    for r in $(find $SRPM_OUT -name '*.src.rpm' | sort -V); do
-        (
-        NAME=$(rpm -q --queryformat '%{NAME}\n' -p $r 2>> /dev/null)
-        ALT_NAME=$(echo $NAME | sed "s#-$BUILD_TYPE\$##")
-        FOUND=0
-
-        if [[ -f "$AUDIT_DIR/$NAME" || ( "$BUILD_TYPE" != "std" && -f "$AUDIT_DIR/$ALT_NAME" ) ]]; then
-            FOUND=1
-        fi
-
-        if [ $FOUND -eq 0 ]; then
-            for INPUT_DIR in $(find $BUILD_INPUTS -name $NAME | sort -V); do
-                if [ -d "$INPUT_DIR/rpmbuild/SRPMS" ]; then
-                    clean_srpm_dir $build_idx "$INPUT_DIR/rpmbuild/SRPMS" 0
-                fi
-                if [ -d $INPUT_DIR ]; then
-                    echo "rm -rf $r"
-                    \rm -rf $r
-                fi
-            done
-            if [ -f $r ]; then
-                \rm -f -v $r
-            fi
-        fi
-        ) &
-    done
-    echo "waiting"
-    wait
-    \rm -rf "$AUDIT_DIR"
-    fi
-    echo "Auditing for obsolete srpms done"
-fi
-
-if [ $CLEAN_FLAG -eq 1 ]; then
-    if [ $ALL -eq 1 ]; then
-       \rm -rf $BUILD_INPUTS 
-       \rm -rf $SOURCE_OUT/*.src.rpm
-    fi
-fi
-
-if [ $EDIT_FLAG -ne 1 ]; then
-   echo "==== Update repodata ====="
-   mkdir -p $SRPM_OUT/repodata
-   for d in $(find -L $SRPM_OUT -type d -name repodata); do
-      (cd $d/..
-       \rm -rf repodata
-       $CREATEREPO $(pwd)
-       create_lst $(pwd)
-      )
-   done
-   echo "==== Update repodata complete ====="
-fi
-
-FINAL_RC=0
-if [ $CLEAN_FLAG -eq 0 ] && [ $EDIT_FLAG -eq 0 ]; then
-    echo ""
-    if [ "$SRPM_FAILED_REBUILD_LIST" != "" ]; then
-       N=$(echo "$SRPM_FAILED_REBUILD_LIST" | wc -w)
-       echo "Failed to build $N packages:"
-       echo "   $SRPM_FAILED_REBUILD_LIST"
-       FINAL_RC=1
-    fi
-    if [ "$SRPM_REBUILT_LIST" != "" ]; then
-       N=$(echo "$SRPM_REBUILT_LIST" | wc -w)
-       echo "Successfully built $N packages:"
-       echo "   $SRPM_REBUILT_LIST"
-       echo ""
-       echo "Compiled src.rpm's can be found here: $SRPM_OUT"
-    fi
-    if [ "$SRPM_FAILED_REBUILD_LIST" == "" ] && [ "$SRPM_REBUILT_LIST" == "" ]; then
-       echo "No packages required a rebuild"
-    fi
-fi
-
-
-if [ "$UNRESOLVED_TARGETS" != "" ]; then
-    echo ""
-    echo "ERROR: $FUNCNAME (${LINENO}): failed to resolve build targets: $UNRESOLVED_TARGETS"
-    FINAL_RC=1
-fi
-
-exit $FINAL_RC
-) 2>&1 | stdbuf -o0 awk '{ print strftime("%H:%M:%S"), $0; fflush(); }' | tee $(date "+$MY_WORKSPACE/build-srpms-parallel_%Y-%m-%d_%H-%M-%S.log") ; exit ${PIPESTATUS[0]}
diff --git a/build-tools/build-srpms-serial b/build-tools/build-srpms-serial
deleted file mode 100755
index a9cceeab..00000000
--- a/build-tools/build-srpms-serial
+++ /dev/null
@@ -1,1424 +0,0 @@
-#!/bin/bash
-# set -x
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Create src.rpm files from source, or from a downloaded tarball
-# or src.rpm plus our additional patches.
-#
-# This version only tries to compile on package at a time.
-#
-# The location of packages to be build are identified by
-# <distro>_pkg_dirs[_<opt-build-type>] files located at the root of
-# any git tree (e.g. stx/integ/centos_pkg_dirs).
-#
-# The build of an individul package is driven by it's build_srpm.data
-# file plus a <pkg-name>.spec file or an srpm_path file.
-#
-
-export ME=$(basename "$0")
-CMDLINE="$ME $@"
-
-
-BUILD_SRPMS_SERIAL_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-source $BUILD_SRPMS_SERIAL_DIR/git-utils.sh
-source $BUILD_SRPMS_SERIAL_DIR/spec-utils
-source $BUILD_SRPMS_SERIAL_DIR/srpm-utils
-source $BUILD_SRPMS_SERIAL_DIR/classify
-source $BUILD_SRPMS_SERIAL_DIR/build-srpms-common.sh
-source $BUILD_SRPMS_SERIAL_DIR/image-utils.sh
-
-
-INITIAL_DIR=$(pwd)
-export DISTRO="centos"
-SRPM_SCRIPT="build_srpm"
-SRPM_DATA="build_srpm.data"
-PKG_DIRS_FILE="${DISTRO}_pkg_dirs"
-
-DEFAULT_SRPM_SCRIPT="$BUILD_SRPMS_SERIAL_DIR/default_$SRPM_SCRIPT"
-SCRIPT_PATH="$DISTRO"
-DATA_PATH="$DISTRO"
-FILES_PATH="$DISTRO/files"
-PATCHES_PATH="$DISTRO/patches"
-ORIG_SPECS_PATH="$DISTRO"
-SRPM_LIST_PATH="$DISTRO/srpm_path"
-
-MIRROR_ROOT="$MY_REPO/${DISTRO}-repo"
-if [ ! -d ${MIRROR_ROOT} ]; then
-    # Old value... a temporary measure for backward compatibility
-    MIRROR_ROOT="$MY_REPO/cgcs-${DISTRO}-repo"
-    if [ ! -d ${MIRROR_ROOT} ]; then
-        MIRROR_ROOT="$MY_REPO/${DISTRO}-repo"
-    fi
-fi
-
-REPO_DOWNLOADS_ROOT="$MY_REPO"
-SRPM_REBUILT_LIST=""
-SRPM_FAILED_REBUILD_LIST=""
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-   CREATEREPO="createrepo"
-fi
-
-#
-# Create a list of rpms in the directory
-#
-create_lst () {
-   local DIR=${1}
-
-       (cd $DIR
-        [ -f rpm.lst ] && \rm -rf rpm.lst
-        [ -f srpm.lst ] && \rm -rf srpm.lst
-        find . -name '*.rpm' -and -not -name '*.src.rpm' | sed 's#^[.][/]##' | sort > rpm.lst
-        find . -name '*.src.rpm' | sed 's#^[.][/]##' | sort > srpm.lst
-       )
-}
-
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   Create source rpms:"
-    echo "      $ME [--rt | --std | --installer | --containers] [--layer=<layer>] [--no-descendants] [--formal] [ list of package names ]"
-    echo ""
-    echo "   Delete source rpms, and the directories associated with it's creation:"
-    echo "   Note: does not clean an edit environment"
-    echo "      $ME --clean [--rt | --std | --installer | --containers] [optional list of package names]"
-    echo ""
-    echo "   Extract an src.rpm into a pair of git trees to aid in editing it's contents,"
-    echo "   one for source code and one for metadata such as the spec file."
-    echo "   If --no-meta-patch is specified, then WRS patches are omitted."
-    echo "      $ME --edit [--rt | --std | --installer | --containers] [--no-meta-patch] [list of package names]"
-    echo ""
-    echo "   Delete an edit environment"
-    echo "      $ME --edit --clean [--rt | --std | --installer | --containers] [list of package names]"
-    echo ""
-    echo "   This help page"
-    echo "      $ME --help"
-    echo ""
-}
-
-
-spec_cache_dir_from_srpm () {
-   local SRPM=${1}
-   local SPEC_DIR=$(echo $SRPM | sed 's#/SRPMS/#/SPECS/#')
-   echo "$SPEC_DIR"
-}
-
-result_dir_from_srpm () {
-   local SRPM=$(basename ${1} | sed 's#.src.rpm$##')
-   local RESULT_DIR="$MY_WORKSPACE/results/$MY_BUILD_ENVIRONMENT/$SRPM"
-   echo "$RESULT_DIR"
-}
-
-# This function creates a bunch of subdirs in $MY_WORKSPACE and makes sure
-# that a $MY_BUILD_CFG file exists.
-#
-# The goal of this is to have a script do as much of the annoying
-# grunt-work so that the "how to build it" instructions aren't 200 lines
-create_output_dirs () {
-	# make sure variables are sane before continuing
-	# Note that $BUILD_ROOT contains either $MY_WORKSPACE or $MY_PATCH_WORKSPACE
-	if [ "x$BUILD_ROOT" == "x" ]; then
-		return
-	fi
-	if [ "x$MY_BUILD_CFG" == "x" ]; then
-		return
-	fi
-	if [ "x$MY_BUILD_DIR" == "x" ]; then
-		return
-	fi
-	if [ "x$MY_SRC_RPM_BUILD_DIR" == "x" ]; then
-		return
-	fi
-
-	# create output dirs
-	mkdir -p $MY_BUILD_DIR
-	mkdir -p $MY_SRC_RPM_BUILD_DIR
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/SOURCES
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/SPECS
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/BUILD
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/RPMS
-	mkdir -p $MY_SRC_RPM_BUILD_DIR/SRPMS
-
-	# create $MY_BUILD_CFG, if required
-	if [ ! -f $MY_BUILD_CFG ]; then
-           echo "FORMAL_BUILD=$FORMAL_BUILD"
-           echo "modify-build-cfg $MY_BUILD_CFG"
-           ${DIR}/modify-build-cfg $MY_BUILD_CFG
-           if [ $? -ne 0 ]; then
-               echo "Could not modifiy $MY_BUILD_CFG";
-               exit 1
-           fi
-	fi
-
-}
-
-NO_DESCENDANTS=0
-NO_BUILD_INFO=0
-HELP=0
-CLEAN_FLAG=0
-FORMAL_FLAG=0
-BUILD_TYPE_FLAG=0
-EDIT_FLAG=0
-NO_META_PATCH_FLAG=0
-
-# read the options
-TEMP=$(getopt -o h --long serial,std,rt,installer,containers,no-descendants,no-meta-patch,no-build-info,help,formal,clean,edit,layer: -n "$ME" -- "$@")
-
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-export BUILD_TYPE=std
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        --no-descendants) NO_DESCENDANTS=1 ; shift ;;
-        --no-build-info) NO_BUILD_INFO=1 ; shift ;;
-        -h|--help) HELP=1 ; shift ;;
-        --clean) CLEAN_FLAG=1 ; shift ;;
-        --formal) FORMAL_FLAG=1 ; shift ;;
-        --std) BUILD_TYPE_FLAG=1; BUILD_TYPE=std; shift ;;
-        --rt) BUILD_TYPE_FLAG=1; BUILD_TYPE=rt; shift ;;
-        --installer) BUILD_TYPE=installer; shift ;;
-        --containers) BUILD_TYPE=containers; shift ;;
-        --edit) EDIT_FLAG=1 ; shift ;;
-        --no-meta-patch) NO_META_PATCH_FLAG=1 ; shift ;;
-        --serial) shift ;;
-        --layer) export LAYER=$2 ; shift ; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error!" ; exit 1 ;;
-    esac
-done
-
-# Reset variables
-if [ -n "$MY_WORKSPACE" ]; then
-   export MY_WORKSPACE_TOP=${MY_WORKSPACE_TOP:-$MY_WORKSPACE}
-   export MY_WORKSPACE=$MY_WORKSPACE_TOP/$BUILD_TYPE
-else
-   export MY_PATCH_WORKSPACE_TOP=${MY_PATCH_WORKSPACE_TOP:-$MY_PATCH_WORKSPACE}
-   export MY_PATCH_WORKSPACE=$MY_PATCH_WORKSPACE_TOP/$BUILD_TYPE
-fi
-
-export MY_BUILD_DIR_TOP=${MY_BUILD_DIR_TOP:-$MY_BUILD_DIR}
-export MY_BUILD_DIR=$MY_BUILD_DIR_TOP/$BUILD_TYPE
-
-export MY_BUILD_ENVIRONMENT_TOP=${MY_BUILD_ENVIRONMENT_TOP:-$MY_BUILD_ENVIRONMENT}
-export MY_BUILD_ENVIRONMENT=$MY_BUILD_ENVIRONMENT_TOP-$BUILD_TYPE
-
-export MY_BUILD_ENVIRONMENT_FILE=$MY_BUILD_ENVIRONMENT.cfg
-export MY_SRC_RPM_BUILD_DIR=$MY_BUILD_DIR/rpmbuild
-export MY_BUILD_CFG=$MY_WORKSPACE/$MY_BUILD_ENVIRONMENT_FILE
-export MY_MOCK_ROOT=$MY_WORKSPACE/mock/root
-
-if [ "$BUILD_TYPE" != "std" ]; then
-   PKG_DIRS_FILE="${DISTRO}_pkg_dirs_${BUILD_TYPE}"
-fi
-
-echo "CLEAN_FLAG=$CLEAN_FLAG"
-TARGETS=$@
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ $FORMAL_FLAG -eq 1 ]; then
-   export FORMAL_BUILD="yes"
-fi
-
-if [ "x$TARGETS" == "x" ] && [ $EDIT_FLAG -eq 1 ]; then
-    echo "ERROR: $FUNCNAME (${LINENO}): a package name is required when --edit is specified"
-    usage
-    exit 0
-fi
-
-SRC_ROOT="$MY_REPO"
-if [ "x$MY_REPO" == "x" ]; then
-   SRC_ROOT=$INITIAL_DIR
-fi
-
-BUILD_ROOT="$MY_WORKSPACE"
-if [ "x$MY_WORKSPACE" == "x" ]; then
-   BUILD_ROOT="$MY_PATCH_WORKSPACE"
-
-   if [ "x$MY_PATCH_WORKSPACE" == "x" ]; then
-       echo "ERROR: $FUNCNAME (${LINENO}): require one of MY_WORKSPACE or MY_PATCH_WORKSPACE be defined"
-       exit 1
-   fi
-fi
-
-export CCACHE_DIR="$BUILD_ROOT/.ccache"
-export SRC_BASE="$SRC_ROOT"
-export STX_BASE="$SRC_BASE/stx"
-export CGCS_BASE="$STX_BASE"
-export DISTRO_REPO_BASE=$MIRROR_ROOT
-export SPECS_BASE="$ORIG_SPECS_PATH"
-export FILES_BASE="$FILES_PATH"
-export PATCHES_BASE="$PATCHES_PATH"
-
-export BUILD_BASE="$BUILD_ROOT"
-BUILD_INPUTS="$BUILD_BASE/inputs"
-SRPM_ASSEMBLE="$BUILD_BASE/srpm_assemble"
-SRPM_WORK="$BUILD_BASE/srpm_work"
-
-if [ "x$MY_SRC_RPM_BUILD_DIR" != "x" ]; then
-    RPM_BUILD_ROOT=$MY_SRC_RPM_BUILD_DIR
-else
-    RPM_BUILD_ROOT=$BUILD_BASE/rpmbuild
-fi
-
-create_output_dirs
-
-export RPM_BUILD_BASE="$RPM_BUILD_ROOT"
-export SRPM_OUT="$RPM_BUILD_BASE/SRPMS"
-export SOURCE_OUT="$RPM_BUILD_BASE/SOURCES"
-export RPM_DIR="$RPM_BUILD_BASE/RPMS"
-
-if [ ! -d $CGCS_BASE ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): expected to find directory at '$CGCS_BASE'"
-   exit 1
-fi
-
-if [ ! -d $BUILD_BASE ]; then
-   if [ $CLEAN_FLAG -eq 1 ]; then
-       exit 0
-   fi
-   echo "ERROR: $FUNCNAME (${LINENO}): expected to find directory at '$BUILD_BASE'"
-   exit 1
-fi
-
-RELEASE_INFO_FILE="$(get_release_info)"
-
-if [ -f "$RELEASE_INFO_FILE" ]; then
-   source "$RELEASE_INFO_FILE"
-else
-   echo "ERROR: $FUNCNAME (${LINENO}): failed to find RELEASE_INFO_FILE=$RELEASE_INFO_FILE"
-   exit 1
-fi
-
-if [ "x$PLATFORM_RELEASE" == "x" ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): PLATFORM_RELEASE is not defined in $RELEASE_INFO_FILE"
-   exit 1
-fi
-
-export PLATFORM_RELEASE
-
-mkdir -p $RPM_BUILD_BASE
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$RPM_BUILD_BASE'"
-   exit 1
-fi
-
-mkdir -p $SRPM_OUT
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$SRPM_OUT'"
-   exit 1
-fi
-
-mkdir -p $RPM_DIR
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$RPM_DIR'"
-   exit 1
-fi
-
-mkdir -p $SRPM_ASSEMBLE
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$SRPM_ASSEMBLE'"
-   exit 1
-fi
-
-mkdir -p $BUILD_INPUTS
-if [ $? -ne 0 ]; then
-   echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$BUILD_INPUTS'"
-   exit 1
-fi
-
-build_dir () {
-   local d=$1
-   local w=$2
-   export PKG_BASE=$d
-   export WORK_BASE=$w
-   export SPECS_BASE="$PKG_BASE/$ORIG_SPECS_PATH"
-   local RC
-
-   local ORIG_DIR=$(pwd)
-   # echo "build_dir: PKG_BASE=$PKG_BASE"
-
-   cd "$PKG_BASE"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): failed to cd into '$PKG_BASE'"
-      return 1
-   fi
-
-   if [ ! -d $ORIG_SPECS_PATH ]; then
-      # nothing to do
-      echo "WARNING: '$ORIG_SPECS_PATH' not found in '$PKG_BASE'"
-      cd "$ORIG_DIR"
-      return 0
-   fi
-
-   SRPM_COUNT=0
-   ORIG_SRPM_PATH=""
-   if [ -f $SRPM_LIST_PATH ]; then
-      # we've found a file (ex centos/srpm_path) which lists a path to a source
-      # RPM file
-      #
-      # The specified file can be of the form
-      #
-      # repo:path/to/file.src.rpm
-      # mirror:path/to/file.src.rpm
-      # /path/to/file.rpm
-      # path/to/file.rpm
-      #
-      # If "repo:" is specified, then we search for the file relative to
-      # $REPO_DOWNLOADS_ROOT (i.e. a path to the file in a "downloads subgit)
-      #
-      # If "mirror:" is specified, then we search for the file relateive to
-      # $MIRROR_ROOT 
-      #
-      # An absolute path is parsed as an absolute path (mainly intended for
-      # developer/experimental use without checking in files or messing with
-      # your git repos)
-      #
-      # A lack of prefix (relative path name) is interpretted as "mirror:"
-      # (legacy support for existing packages)
-      #
-      # Other prefixes (file:, http:, whatever:)are unsupported at this time
-
-      for p in $(grep -v '^#' $SRPM_LIST_PATH | grep -v '^$'); do
-         # absolute path source rpms
-         echo "$p" | grep "^/" >/dev/null && ORIG_SRPM_PATH=$p
-
-         if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-            # handle repo: definitions
-            echo "$p" | grep "^repo:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^repo:%$REPO_DOWNLOADS_ROOT/%")
-         fi
-
-         if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-            # handle mirror: definitions
-            echo "$p" | grep "^mirror:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^mirror:%$MIRROR_ROOT/%" |  sed "s#CentOS/tis-r3-CentOS/kilo/##" | sed "s#CentOS/tis-r3-CentOS/mitaka/##")
-         fi
-
-         if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-            # we haven't found a valid prefix yet, so assume it's a legacy
-            # file (mirror: interpretation)
-            ORIG_SRPM_PATH="$MIRROR_ROOT/$p"
-         fi
-
-         # echo "ORIG_SRPM_PATH=$ORIG_SRPM_PATH"
-         if [ -f $ORIG_SRPM_PATH ]; then
-             SRPM_COUNT=$((SRPM_COUNT + 1))
-         else
-             echo "ERROR: $FUNCNAME (${LINENO}): Invalid srpm path '$p', evaluated as '$ORIG_SRPM_PATH', found in '$PKG_BASE/$SRPM_LIST_PATH'"
-             ORIG_SRPM_PATH=""
-             return 3
-         fi
-      done
-   fi
-
-   # Clean up an tmp_spec_*.spec file left by a prior failed build
-   for f in $(find $ORIG_SPECS_PATH -name 'tmp_spec_*.spec'); do 
-      \rm -f $f
-   done
-
-   SPEC_COUNT=$(find $ORIG_SPECS_PATH -name '*.spec' | wc -l)
-   if [ $SPEC_COUNT -eq 0 ]; then
-      if [ -f $ORIG_SPECS_PATH/spec_path ]; then
-         SPECS_BASE=$SRC_BASE/$(cat $SPECS_BASE/spec_path)
-         SPEC_COUNT=$(find $SPECS_BASE -maxdepth 1 -name '*.spec' | wc -l)
-      fi
-   fi
-
-   if [ $SPEC_COUNT -eq 0 ] && [ $SRPM_COUNT -eq 0 ]; then
-      # nothing to do
-      echo "ERROR: $FUNCNAME (${LINENO}): Neither srpm_path nor .spec file not found in '$PKG_BASE/$ORIG_SPECS_PATH'"
-      cd "$ORIG_DIR"
-      return 0
-   fi
-
-
-   if [ $SPEC_COUNT -gt 0 ] && [ $SRPM_COUNT -gt 0 ]; then
-      # nothing to do
-      echo "ERROR: $FUNCNAME (${LINENO}): Please provide only one of srpm_path or .spec files, not both, in '$PKG_BASE/$ORIG_SPECS_PATH'"
-      cd $ORIG_DIR
-      return 0
-   fi
-
-   if [  $SPEC_COUNT -gt 0 ]; then
-      build_dir_spec
-      RC=$?
-      cd "$ORIG_DIR"
-      return $RC
-   else
-      build_dir_srpm $ORIG_SRPM_PATH
-      RC=$?
-      cd "$ORIG_DIR"
-      return $RC
-   fi
-
-   cd "$ORIG_DIR"
-   return 0
-}
-
-
-clean_srpm_dir () {
-   local DIR=$1
-   local EXCLUDE_MD5=$2
-   local SRPM_PATH
-   local SRPM_FILE
-   local SRPM_OUT_PATH
-   local SRPM_NAME
-   local SRPM_OUT_NAME
-   local INPUTS_TO_CLEAN=""
-
-   if [ "$EXCLUDE_MD5" == "" ]; then
-       EXCLUDE_MD5=0
-   fi
-
-   echo "clean_srpm_dir DIR=$DIR"
-
-   INPUTS_TO_CLEAN=$(dirname $(dirname $DIR))
-   echo "$INPUTS_TO_CLEAN" | grep -q "^$BUILD_INPUTS/"
-   if [ $? -ne 0 ] ; then
-       INPUTS_TO_CLEAN=""
-   fi
-
-   for SRPM_PATH in $(find "$DIR" -name '*.src.rpm'); do
-       SRPM_FILE=$(basename $SRPM_PATH)
-       SRPM_NAME=$(rpm -q --queryformat '%{NAME}\n' --nosignature -p $SRPM_PATH 2>> /dev/null)
-       \rm -fv $SRPM_PATH $SRPM_OUT/$SRPM_FILE
-       if [ -d $SRPM_ASSEMBLE/$SRPM_NAME ]; then
-           echo "rm -rf $SRPM_ASSEMBLE/$SRPM_NAME"
-           \rm -rf $SRPM_ASSEMBLE/$SRPM_NAME
-       fi
-
-       if [ -d $SOURCE_OUT/$SRPM_FILE ]; then
-           echo "rm -rf $SOURCE_OUT/$SRPM_FILE"
-           \rm -rf $SOURCE_OUT/$SRPM_FILE
-       fi
-
-       if [ $EXCLUDE_MD5 -eq 0 ] && [ -d $SOURCE_OUT/$SRPM_NAME ]; then
-           echo "rm -rf $SOURCE_OUT/$SRPM_NAME"
-           \rm -rf $SOURCE_OUT/$SRPM_NAME
-       fi
-
-       local d
-       local src_d
-       local spec
-       local spec_name
-
-       for d in $(find $BUILD_INPUTS -type d -name "${SRPM_NAME}*") ;do
-           src_d=$(echo $d | sed "s#^$BUILD_INPUTS/#$MY_REPO/#")
-
-           for spec in $(find $src_d/${DISTRO} -name '*.spec'); do
-               spec_name=$(spec_find_tag Name $spec)
-               if [ "$spec_name" == "$SRPM_NAME" ]; then
-                   INPUTS_TO_CLEAN=$(if [ "x$INPUTS_TO_CLEAN" != "x" ]; then echo $INPUTS_TO_CLEAN; fi; echo "$d")
-               fi
-           done
-       done
-
-       # Look for older versions of the same src rpm that also need cleaning
-       for SRPM_OUT_PATH in $(ls -1 $SRPM_OUT/$SRPM_NAME*.src.rpm 2>> /dev/null); do
-           SRPM_OUT_FILE=$(basename $SRPM_OUT_PATH)
-           SRPM_OUT_NAME=$(rpm -q --queryformat '%{NAME}\n' -p $SRPM_OUT_PATH 2>> /dev/null)
-           if [ "$SRPM_NAME" == "$SRPM_OUT_NAME" ]; then
-              \rm -fv $SRPM_OUT_PATH
-              if [ -d $SOURCE_OUT/$SRPM_OUT_FILE ]; then
-                  echo "rm -rf $SOURCE_OUT/$SRPM_OUT_FILE"
-                  \rm -rf $SOURCE_OUT/$SRPM_OUT_FILE
-              fi
-
-           fi
-       done
-   done
-
-   if [ "x$INPUTS_TO_CLEAN" != "x" ]; then
-       for d in $INPUTS_TO_CLEAN; do
-           if [ -d $d/rpmbuild ]; then
-               echo "rm -rf $d"
-               \rm -rf $d
-           fi
-       done
-   fi
-}
-
-build_dir_srpm () {
-   local ORIG_SRPM_PATH=$1
-
-   local ORIG_SRPM=$(basename $ORIG_SRPM_PATH)
-   local NAME=$(rpm -q --queryformat '%{NAME}\n' --nosignature -p $ORIG_SRPM_PATH)
-   local PKG_NAME_VER=$(rpm -q --queryformat '%{NAME}-%{VERSION}-%{RELEASE}\n' --nosignature -p $ORIG_SRPM_PATH)
-   local PKG_DIR="$NAME"
-   local TARGET_FOUND=""
-   local RC=0
-
-   export SRPM_EXPORT_NAME=$NAME
-   export SRPM_EXPORT_VER=$VER
-
-   local NEED_BUILD=0
-
-   if [ "x$TARGETS" == "x" ]; then
-      NEED_BUILD=1
-      TARGET_FOUND=$NAME
-   else
-      TARGET_LIST=( $TARGETS )
-      TARGET_FOUND=$(srpm_match_target_list TARGET_LIST  "$ORIG_SRPM_PATH" 2>> /dev/null)
-      if [ $? -eq 0 ]; then
-         echo "found target '$TARGET_FOUND' in '$ORIG_SRPM'"
-         NEED_BUILD=1
-         UNRESOLVED_TARGETS=$(echo "$UNRESOLVED_TARGETS" | sed "s/\(^\|[[:space:]]\)$TARGET_FOUND\([[:space:]]\|$\)/ /g")
-      fi
-   fi
-
-   if [ $NEED_BUILD -eq 0 ]; then
-      return 0
-   fi
-
-   local ROOT_DIR="$SRPM_ASSEMBLE"   
-   if [ $EDIT_FLAG -eq 1 ]; then
-      mkdir -p $SRPM_WORK
-      ROOT_DIR="$SRPM_WORK"
-   fi
-   local PKG_ROOT_DIR="$ROOT_DIR/$PKG_DIR"   
-   local BUILD_DIR="$PKG_DIR/rpmbuild"
-   local FULL_BUILD_DIR="$ROOT_DIR/$BUILD_DIR"
-   local SRPM_DIR="$FULL_BUILD_DIR/SRPMS"
-   local SOURCES_DIR="$SOURCE_OUT"
-   
-   if [ $CLEAN_FLAG -eq 1 ]; then
-      # clean
-      echo "===== Cleaning '$TARGET_FOUND' ====="
-
-      if [ -d $SRPM_DIR ] && [ $EDIT_FLAG -eq 0 ]; then
-         clean_srpm_dir "$SRPM_DIR" 0
-      fi
-
-      if [ -d $PKG_ROOT_DIR ]; then
-          echo "rm -rf $PKG_ROOT_DIR"
-          \rm -rf "$PKG_ROOT_DIR"
-      fi
-   else
-      #build
-      echo "===== Build SRPM for '$TARGET_FOUND' ====="
-      echo "PKG_BASE=$PKG_BASE"
-      echo "BUILD_DIR=$BUILD_DIR"
-      echo "SRPM_DIR=$SRPM_DIR"
-
-      if [ ! -d $ROOT_DIR ]; then
-         mkdir -p "$ROOT_DIR"
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): mkdir '$ROOT_DIR' failed"
-            return 1
-         fi
-      fi
-
-      #
-      # Load data from build_srpm.data
-      #
-      export DATA="$DATA_PATH/$SRPM_DATA"
-
-      if [ -f "$DATA" ]; then
-          srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE_SRPM" "$ORIG_SRPM_PATH"
-          if [ $? -ne 0 ]; then
-              echo "ERROR: $FUNCNAME (${LINENO}): failed to source $DATA"
-              return 1
-          fi
-      fi
-
-      #
-      # Capture md5 data for all input files
-      #
-      local TARGET_SOURCES_DIR="$SOURCES_DIR/$TARGET_FOUND"
-      local INPUT_FILES_MD5="$TARGET_SOURCES_DIR/srpm_input.md5"
-      local REFERENCE_MD5="$TARGET_SOURCES_DIR/srpm_reference.md5"
-
-      mkdir -p "$TARGET_SOURCES_DIR"
-      md5sums_from_input_vars "$SRC_BUILD_TYPE_SRPM" "$ORIG_SRPM_PATH" "$TARGET_SOURCES_DIR" > "$INPUT_FILES_MD5"
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): md5sums_from_input_vars '$SRC_BUILD_TYPE_SRPM' '$ORIG_SRPM_PATH' '$TARGET_SOURCES_DIR'"
-         return 1
-      fi
-      echo "Wrote: $INPUT_FILES_MD5"
-
-      #
-      # Is a rebuild required?
-      # Compare md5 of current inputs vs md5 of previous build?
-      #
-      local BUILD_NEEDED=0
-      local SRPM_OUT_PATH2
-      local DIFF_LINE
-      local DIFF_FILE
-
-
-      if [ -f $REFERENCE_MD5 ]; then
-         DIFF_LINE=$(diff "$INPUT_FILES_MD5" "$REFERENCE_MD5" | head -n 2 | tail -n 1; exit ${PIPESTATUS[0]})
-         if [ $? -ne 0 ]; then
-            DIFF_FILE=$(echo "$DIFF_LINE" | cut -d ' ' -f4-)
-            BUILD_NEEDED=1
-            case ${DIFF_LINE:0:1} in
-               '>') echo "Rebuild required due to deleted file: $DIFF_FILE" ;;
-               '<') echo "Rebuild required due to new or changed file: $DIFF_FILE" ;;
-               *)  echo "Rebuild required due to diff: $DIFF_LINE" ;;
-            esac
-         fi
-      else
-         echo "Rebuild required due to missing reference md5: $REFERENCE_MD5"
-         BUILD_NEEDED=1
-      fi
-
-      if [ -d "$FULL_BUILD_DIR/SRPMS" ]; then
-         b=""
-         for SRPM_PATH in $(find "$FULL_BUILD_DIR/SRPMS" -name '*.src.rpm' | sort -V); do
-            b=$(basename $SRPM_PATH)
-            SRPM_OUT_PATH2=$(find $SRPM_OUT -name $b)
-            if [ "x$SRPM_OUT_PATH2" == "x" ]; then
-               echo "Rebuild required due to missing srpm: $b"
-               BUILD_NEEDED=1
-            fi
-         done
-
-         if [ "$b" == "" ]; then
-            echo "Rebuild required due no src.rpm in directory: '$FULL_BUILD_DIR/SRPMS'"
-            BUILD_NEEDED=1
-         fi
-      else
-         echo "Rebuild required due to missing directory: '$FULL_BUILD_DIR/SRPMS'"
-         BUILD_NEEDED=1
-      fi
-   
-      if [ $BUILD_NEEDED -eq 0 ]; then
-         echo "SRPM build not required for '$PKG_BASE'"
-         echo "===== Build complete for '$TARGET_FOUND' ====="
-         echo
-         return 0
-      fi
-   
-      if [ $EDIT_FLAG -eq 0 ]; then
-         clean_srpm_dir "$FULL_BUILD_DIR/SRPMS" 1
-
-         if [ -d $PKG_ROOT_DIR ]; then
-            echo "rm -rf $PKG_ROOT_DIR"
-            \rm -rf $PKG_ROOT_DIR
-         fi
-      fi
-
-      if [ $EDIT_FLAG -eq 1 ]; then
-         PKG_CLASSIFICATION=$(classify $PKG_BASE)
-         echo "$PKG_CLASSIFICATION = classify $PKG_BASE"
-         if [ "$PKG_CLASSIFICATION" == "spec + tarball" ] || [ "$PKG_CLASSIFICATION" == "srpm + patches" ]; then
-            echo "OK to edit $PKG_BASE"
-         else
-            echo "Can't edit this package, it is of type '$PKG_CLASSIFICATION', it is not derived from SRPM or tarball and patches"
-            return 1
-         fi
-
-         echo "srpm_extract_to_git '$ORIG_SRPM_PATH' '$PKG_BASE' '$ROOT_DIR' '$BUILD_DIR' '$PKG_NAME_VER' '$NO_META_PATCH_FLAG' '$TIS_PATCH_VER' '$PBR_VERSION'"
-         srpm_extract_to_git $ORIG_SRPM_PATH $PKG_BASE $ROOT_DIR $BUILD_DIR $PKG_NAME_VER $NO_META_PATCH_FLAG $TIS_PATCH_VER $PBR_VERSION
-         RC=$?
-         if [ $RC -ne 0 ]; then
-            if [ $RC -eq 1 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): failed to extract srpm '$ORIG_SRPM_PATH'"
-            fi
-            return $RC
-         fi
-
-         local LOC
-         LOC=$(git_list_containing_tag "${PKG_ROOT_DIR}/gits" "pre_wrs_$PKG_NAME_VER" | head -n 1 )
-         echo "===== '$TARGET_FOUND' has been extracted for editing. ====="
-         echo "===== Metadata can be found at: $PKG_ROOT_DIR/rpmbuild"
-         echo "===== Source code can be found at: $LOC"
-         return 0
-      fi
-
-      #
-      # Find age of youngest input file.
-      # We will apply this as the creation/modification timestamp of the src.rpm we produce.
-      #
-      AGE=$(find $PKG_BASE $ORIG_SRPM_PATH ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-      if [ -f $PKG_BASE/$DATA ]; then
-         AGE2=$(
-               cd $PKG_BASE
-               PATH_LIST=""
-
-               # NOTE: SRC_DIR is not honored in this build path
-      
-               if [ "x$COPY_LIST" != "x" ]; then
-                  PATH_LIST="$PATH_LIST $COPY_LIST"
-               fi
-      
-               # NOTE: COPY_LIST_TO_TAR is not honored in this build path
-
-      
-               if [ "x$PATH_LIST" == "x" ]; then
-                  echo "0"
-               else
-                  AGE2=$(find -L $PATH_LIST ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-                  echo  "$AGE2"
-               fi
-               )
-          if [ $AGE2 -gt $AGE ]; then
-             AGE=$AGE2
-          fi
-      fi
-      
-      srpm_extract $ORIG_SRPM_PATH $PKG_BASE $ROOT_DIR $BUILD_DIR $PKG_NAME_VER
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): failed to extract srpm '$ORIG_SRPM_PATH'"
-         return 1
-      fi
-
-      if [ "x$COPY_LIST" != "x" ]; then
-         echo "COPY_LIST: $COPY_LIST"
-         for p in $COPY_LIST; do
-            # echo "COPY_LIST: $p"
-            \cp -L -r -f -v $p $FULL_BUILD_DIR/SOURCES
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): COPY_LIST: file not found: '$p'"
-               return 1
-            fi
-         done
-      fi
-
-      srpm_assemble $FULL_BUILD_DIR $TIS_PATCH_VER $PBR_VERSION
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): failed to assemble srpm for '$PKG_NAME_VER'"
-         SRPM_FAILED_REBUILD_LIST="$SRPM_FAILED_REBUILD_LIST $TARGET_FOUND"
-         return 1
-      fi
-
-      TS=$(date -d @$AGE +%Y-%m-%dT%H:%M:%S)
-      for s in $(find $FULL_BUILD_DIR/SRPMS -name '*.src.rpm'); do
-         \cp -L -f -v $s $SRPM_OUT/
-         ss=$(basename $s)
-         touch $SRPM_OUT/$ss --date=$TS
-
-         mkdir -p $SOURCES_DIR/$ss
-         BIG_FLAG_FILE="$SOURCES_DIR/$ss/BIG"
-         SLOW_FLAG_FILE="$SOURCES_DIR/$ss/SLOW"
-
-         if [ $BUILD_IS_BIG -gt 0 ]; then
-             echo "$BUILD_IS_BIG" > $BIG_FLAG_FILE
-         else
-             if [ -f $BIG_FLAG_FILE ]; then
-                 \rm -f $BIG_FLAG_FILE
-             fi
-         fi
-
-         if [ $BUILD_IS_SLOW -gt 0 ]; then
-             echo "$BUILD_IS_SLOW" > $SLOW_FLAG_FILE
-         else
-             if [ -f $SLOW_FLAG_FILE ]; then
-                 \rm -f $SLOW_FLAG_FILE
-             fi
-         fi
-
-         \rm -f -v "$REFERENCE_MD5"
-         \mv -v "$INPUT_FILES_MD5" "$REFERENCE_MD5"
-
-         local SPEC_DIR=$(spec_cache_dir_from_srpm $SRPM_OUT/$ss)
-         if [ -d $SPEC_DIR/BUILDS_VR ]; then
-            for f in $(ls -1 $SPEC_DIR/BUILDS_VR); do
-                for r in $(find  $RPM_DIR -name "$f*rpm" 2>> /dev/null); do
-                   \rm -f -v $r
-                done
-            done
-         fi
-
-         local RESULT_DIR=$(result_dir_from_srpm $SRPM_OUT/$ss)
-         if [ -d $RESULT_DIR ]; then
-             echo "rm -rf $RESULT_DIR"
-             \rm -rf $RESULT_DIR
-         fi
-      done
-
-      SRPM_REBUILT_LIST="$SRPM_REBUILT_LIST $TARGET_FOUND"
-      echo "SRPM build successful for '$PKG_NAME_VER'"
-      echo "===== Build complete for '$TARGET_FOUND' ====="
-      echo
-
-   fi
-
-   return 0
-}
-
-
-build_dir_spec () {
-   local NEED_BUILD=0
-   local TARGET_FOUND=""
-
-   if [ "x$TARGETS" == "x" ]; then
-      NEED_BUILD=1
-      for f in $(find $SPECS_BASE -maxdepth 1 -name '*.spec'); do
-         TARGET_FOUND=$(spec_find_tag Name "$f" 2>> /dev/null)
-         if [ $? -ne 0 ]; then
-             TARGET_FOUND=$(spec_find_global service "$f" 2>> /dev/null)
-             if [ $? -ne 0 ]; then
-                 TARGET_FOUND=""
-             fi
-         fi
-      done
-   else
-      TARGET_LIST=( $TARGETS )
-      for f in $(find $SPECS_BASE -maxdepth 1 -name '*.spec' 2>> /dev/null); do
-         TARGET_FOUND=$(spec_match_target_list TARGET_LIST "$f" 2>> /dev/null)
-         if [ $? -eq 0 ]; then
-            echo "found target '$TARGET_FOUND' in '$f'"
-            NEED_BUILD=1
-            UNRESOLVED_TARGETS=$(echo "$UNRESOLVED_TARGETS" | sed "s/\(^\|[[:space:]]\)$TARGET_FOUND\([[:space:]]\|$\)/ /g")
-            break
-         fi
-      done
-   fi
-
-   if [ $NEED_BUILD -eq 1 ]; then
-      MAKE_SRPM="$SCRIPT_PATH/$SRPM_SCRIPT"
-      export DATA="$DATA_PATH/$SRPM_DATA"
-
-      export RPMBUILD_BASE="$WORK_BASE/rpmbuild"
-      SRPM_PATH="$RPMBUILD_BASE/SRPMS"
-      SPEC_PATH="$RPMBUILD_BASE/SPECS"
-      SOURCES_PATH="$RPMBUILD_BASE/SOURCES"
-      local ROOT_DIR="$RPMBUILD_BASE"
-      local PKG_ROOT_DIR="$RPMBUILD_BASE"
-      local SPEC=$(find $SPECS_BASE -maxdepth 1 -name '*.spec' | head -n 1)
-      local NAME=$(spec_find_tag Name $SPEC)
-      local PKG_NAME_VER=$(spec_name_ver_rel $SPEC)
-      local PKG_DIR="$NAME"
-      local BUILD_DIR="$PKG_DIR/rpmbuild"
-      local FULL_BUILD_DIR="$ROOT_DIR"
-      local SRPM_DIR="$FULL_BUILD_DIR/SRPMS"
-      local SOURCES_DIR="$SOURCE_OUT"
-
-      if [ $EDIT_FLAG -eq 1 ]; then
-         mkdir -p $SRPM_WORK
-         ROOT_DIR="$SRPM_WORK"
-         PKG_ROOT_DIR="$ROOT_DIR/$PKG_DIR"   
-      fi
-
-      if [ $CLEAN_FLAG -eq 1 ]; then
-         # clean
-         echo "===== Cleaning '$TARGET_FOUND' ====="
-         if [ -d $SRPM_PATH ] && [ $EDIT_FLAG -eq 0 ]; then
-            clean_srpm_dir $SRPM_PATH 0
-         fi
-
-         if [ -d $PKG_ROOT_DIR ]; then
-             echo "rm -rf $PKG_ROOT_DIR"
-             \rm -rf "$PKG_ROOT_DIR"
-         fi
-      else
-         # build
-         echo "===== Build SRPM for '$TARGET_FOUND' ====="
-         echo "PKG_BASE=$PKG_BASE"
-         echo "WORK_BASE=$WORK_BASE"
-         echo "RPMBUILD_BASE=$RPMBUILD_BASE"
-         if [ ! -x $MAKE_SRPM ]; then
-            if [ ! -f $DATA ]; then
-                echo "expected to find an executable script at '$MAKE_SRPM' or data for the default script at '$DATA'"
-                cd $INITIAL_DIR
-                exit 1
-            else
-                MAKE_SRPM="$DEFAULT_SRPM_SCRIPT"
-            fi
-         fi
-   
-         #
-         # Load data from build_srpm.data
-         #
-
-         if [ -f "$DATA" ]; then
-             srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE_SPEC" "$SPEC"
-             if [ $? -ne 0 ]; then
-                 echo "ERROR: $FUNCNAME (${LINENO}): failed to source $DATA"
-                 return 1
-             fi
-         fi
-
-         #
-         # Capture md5 data for all input files
-         #
-         local TARGET_SOURCES_DIR="$SOURCES_DIR/$TARGET_FOUND"
-         local INPUT_FILES_MD5="$TARGET_SOURCES_DIR/srpm_input.md5"
-         local REFERENCE_MD5="$TARGET_SOURCES_DIR/srpm_reference.md5"
-
-         mkdir -p "$TARGET_SOURCES_DIR"
-         md5sums_from_input_vars "$SRC_BUILD_TYPE_SPEC" "$SPEC" "$TARGET_SOURCES_DIR" > "$INPUT_FILES_MD5"
-         if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): md5sums_from_input_vars '$SRC_BUILD_TYPE_SPEC' '$SPEC' '$TARGET_SOURCES_DIR'"
-             return 1
-         fi
-         echo "Wrote: $INPUT_FILES_MD5"
-
-         #
-         # Is a rebuild required?
-         # Compare md5 of current inputs vs md5 of previous build?
-         #
-         local BUILD_NEEDED=0
-         local SRPM_OUT_PATH2
-         local DIFF_LINE
-         local DIFF_FILE
-
-         if [ -f $REFERENCE_MD5 ]; then
-            DIFF_LINE=$(diff "$INPUT_FILES_MD5" "$REFERENCE_MD5" | head -n 2 | tail -n 1; exit ${PIPESTATUS[0]})
-            if [ $? -ne 0 ]; then
-               DIFF_FILE=$(echo "$DIFF_LINE" | cut -d ' ' -f4-)
-               BUILD_NEEDED=1
-               case ${DIFF_LINE:0:1} in
-                  '>') echo "Rebuild required due to deleted file: $DIFF_FILE" ;;
-                  '<') echo "Rebuild required due to new or changed file: $DIFF_FILE" ;;
-                  *)  echo "Rebuild required due to diff: $DIFF_LINE" ;;
-               esac
-            fi
-         else
-            echo "Rebuild required due to missing reference md5: $REFERENCE_MD5"
-            BUILD_NEEDED=1
-         fi
-
-         if [ -d "$FULL_BUILD_DIR/SRPMS" ]; then
-            if [ -d "$RPMBUILD_BASE/SRPMS" ]; then
-               b=""
-               for SRPM_PATH2 in $(find "$RPMBUILD_BASE/SRPMS" -name '*.src.rpm' | sort -V); do
-                  b=$(basename $SRPM_PATH2)
-                  SRPM_OUT_PATH2=$(find $SRPM_OUT -name $b)
-                  if [ "x$SRPM_OUT_PATH2" == "x" ]; then
-                     echo "Rebuild required due to missing srpm: $b"
-                     BUILD_NEEDED=1
-                  fi
-               done
-
-               if [ "$b" == "" ]; then
-                   echo "Rebuild required due no src.rpm found in directory: '$RPMBUILD_BASE/SRPMS'"
-                   BUILD_NEEDED=1
-               fi
-            else
-               echo "Rebuild required due to missing directory: '$RPMBUILD_BASE/SRPMS'"
-               BUILD_NEEDED=1
-            fi
-         else
-            echo "Rebuild required due to missing directory: '$FULL_BUILD_DIR/SRPMS'"
-            BUILD_NEEDED=1
-         fi
-   
-         if [ $EDIT_FLAG -eq 1 ]; then
-
-            PKG_CLASSIFICATION=$(classify $PKG_BASE)
-            echo "$PKG_CLASSIFICATION = classify $PKG_BASE"
-            if [ "$PKG_CLASSIFICATION" == "spec + tarball" ] || [ "$PKG_CLASSIFICATION" == "srpm + patches" ]; then
-               echo "OK to edit $PKG_BASE"
-            else
-               echo "Can't edit this package, it is of type '$PKG_CLASSIFICATION', it is not derived from SRPM or tarball and patches"
-               return 1
-            fi
-
-            echo "tar_and_spec_extract_to_git '$SPEC' '$PKG_BASE' '$ROOT_DIR' '$BUILD_DIR' '$PKG_NAME_VER' '$NO_META_PATCH_FLAG' '$TIS_PATCH_VER' '$PBR_VERSION'"
-            tar_and_spec_extract_to_git "$SPEC" "$PKG_BASE" "$ROOT_DIR" "$BUILD_DIR" "$PKG_NAME_VER" "$NO_META_PATCH_FLAG" "$TIS_PATCH_VER" "$PBR_VERSION"
-            RC=$?
-            if [ $RC -ne 0 ]; then
-               if [ $RC -eq 1 ]; then
-                  echo "ERROR: $FUNCNAME (${LINENO}): failed to extract srpm '$ORIG_SRPM_PATH'"
-               fi
-               return $RC
-            fi
-   
-            local LOC
-            LOC=$(git_list_containing_branch "${PKG_ROOT_DIR}/gits" "${PKG_NAME_VER}" | head -n 1 )
-            echo "===== '$TARGET_FOUND' has been extracted for editing. ====="
-            echo "===== Metadata can be found at: $PKG_ROOT_DIR/rpmbuild"
-            echo "===== Source code can be found at: $LOC"
-            return 0
-         fi
-
-         if [ $BUILD_NEEDED -eq 0 ]; then
-            echo "SRPM build not required for '$PKG_BASE'"
-            echo "===== Build complete for '$TARGET_FOUND' ====="
-            echo
-            return 0
-         fi
-
-
-         export SRC_BUILD_TYPE="$SRC_BUILD_TYPE_SPEC"
-         export SRPM_OR_SPEC_PATH="$SPEC"
-
-         echo "MAKE_SRPM=$MAKE_SRPM"
-         echo "DATA=$DATA"
-         echo "SRC_BUILD_TYPE=$SRC_BUILD_TYPE"
-         echo "SRPM_OR_SPEC_PATH=$SRPM_OR_SPEC_PATH"
-   
-         if [ -d "$RPMBUILD_BASE/SRPMS" ]; then
-             clean_srpm_dir "$RPMBUILD_BASE/SRPMS" 1
-         fi
-         if [ -d $RPMBUILD_BASE ]; then
-             echo "rm -rf $RPMBUILD_BASE"
-             \rm -rf "$RPMBUILD_BASE"
-         fi
-
-         echo "mkdir -p $WORK_BASE $SRPM_PATH $SPEC_PATH $SOURCES_PATH"
-         mkdir -p "$WORK_BASE" && \
-         mkdir -p "$SRPM_PATH" && \
-         mkdir -p "$SPEC_PATH" && \
-         mkdir -p "$SOURCES_PATH" 
-         if [ $? -ne 0 ]; then
-             echo "ERROR: Failed to create directories under: $WORK_BASE"
-         fi
-
-         \cp -L -f -v $SPECS_BASE/*.spec $SPEC_PATH/
-         if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): Failed to copy spec files from '$SPECS_BASE' to '$SPEC_PATH'"
-         fi
-
-         #
-         # build
-         #
-         $MAKE_SRPM
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): script failed '$MAKE_SRPM'"
-            SRPM_FAILED_REBUILD_LIST="$SRPM_FAILED_REBUILD_LIST $TARGET_FOUND"
-            exit 1
-         fi
-
-         #
-         # Find age of youngest input file.
-         # We will apply this as the creation/modification timestamp of the src.rpm we produce.
-         #
-         AGE=$(find $PKG_BASE ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-         if [ -f $PKG_BASE/$DATA ]; then
-            AGE2=$(
-                  cd $PKG_BASE
-                  PATH_LIST=""
-                  if [ "x$SRC_DIR" != "x" ]; then
-                     if [ -d "$SRC_DIR" ]; then
-                        PATH_LIST="$PATH_LIST $SRC_DIR"
-                     fi
-                  fi
-
-                  if [ "x$COPY_LIST" != "x" ]; then
-                     PATH_LIST="$PATH_LIST $COPY_LIST"
-                  fi
-
-                  if [ "x$COPY_LIST_TO_TAR" != "x" ]; then
-                     PATH_LIST="$PATH_LIST $COPY_LIST_TO_TAR"
-                  fi
-
-                  if [ "x$PATH_LIST" == "x" ]; then
-                     echo "0"
-                  else
-                     AGE2=$(find -L $PATH_LIST ! -path '*/.git/*' ! -path '*/.tox/*' -type f -exec stat --format '%Y' "{}" \; | sort -nr | head -n 1)
-                     echo  "$AGE2"
-                  fi
-                  )
-             if [ $AGE2 -gt $AGE ]; then
-                AGE=$AGE2
-             fi
-         fi
-
-         TS=$(date -d @$AGE +%Y-%m-%dT%H:%M:%S)
-         for s in $(find $SRPM_PATH -name '*.src.rpm'); do
-             \cp -L -f $s $SRPM_OUT/
-             ss=$(basename $s)
-             touch $SRPM_OUT/$ss --date=$TS
-
-             mkdir -p $SOURCES_DIR/$ss
-             BIG_FLAG_FILE="$SOURCES_DIR/$ss/BIG"
-             SLOW_FLAG_FILE="$SOURCES_DIR/$ss/SLOW"
-
-             if [ $BUILD_IS_BIG -gt 0 ]; then
-                 echo $BUILD_IS_BIG >  $BIG_FLAG_FILE
-             else
-                 if [ -f $BIG_FLAG_FILE ]; then
-                     \rm -f $BIG_FLAG_FILE
-                 fi
-             fi
-
-             if [ $BUILD_IS_SLOW -gt 0 ]; then
-                 echo $BUILD_IS_SLOW > $SLOW_FLAG_FILE
-             else
-                 if [ -f $SLOW_FLAG_FILE ]; then
-                     \rm -f $SLOW_FLAG_FILE
-                 fi
-             fi
-
-             \rm -f -v "$REFERENCE_MD5"
-             \mv -v "$INPUT_FILES_MD5" "$REFERENCE_MD5"
-
-             local SPEC_DIR=$(spec_cache_dir_from_srpm $SRPM_OUT/$ss)
-             if [ -d $SPEC_DIR/BUILDS_VR ]; then
-                for f in $(ls -1 $SPEC_DIR/BUILDS_VR); do
-                    for r in $(find  $RPM_DIR -name "$f*rpm" 2>> /dev/null); do
-                       \rm -f -v $r
-                    done
-                done
-             fi
-
-             local RESULT_DIR=$(result_dir_from_srpm $SRPM_OUT/$ss)
-             if [ -d $RESULT_DIR ]; then
-                 echo "rm -rf $RESULT_DIR"
-                 \rm -rf $RESULT_DIR
-             fi
-         done
-
-         SRPM_REBUILT_LIST="$SRPM_REBUILT_LIST $TARGET_FOUND"
-         echo "===== Build complete for '$TARGET_FOUND' ====="
-         echo
-      fi
-   fi
-
-   return 0
-}
-
-(
-echo "$CMDLINE"
-
-if [ -L $BUILD_ROOT/repo ]; then
-    REPO_DEST=$(readlink $BUILD_ROOT/repo)
-    if [ "$REPO_DEST" != "$SRC_ROOT" ]; then
-        echo "Error: MY_REPO changed since last build"
-        echo "   old path: $REPO_DEST"
-        echo "   new path: $SRC_ROOT"
-        echo "Please run '$ME --clean' if you want to compile from a new source tree"
-        exit 1
-    fi
-fi
-
-if [ ! -L $BUILD_ROOT/repo ]; then
-    ln -s $SRC_ROOT $BUILD_ROOT/repo
-fi
-
-ALL=0
-UNRESOLVED_TARGETS=" "
-if [ "x$TARGETS" == "x" ]; then
-    echo "make: all"
-    ALL=1
-else
-    echo "make: $TARGETS"
-    UNRESOLVED_TARGETS="$TARGETS"
-fi
-
-if [ $EDIT_FLAG -eq 0 ]; then
-    if [ $CLEAN_FLAG -eq 1 ]; then
-        EXTRA_RPM_FLAGS=""
-
-        if [ $NO_BUILD_INFO -eq 1 ]; then
-           EXTRA_RPM_FLAGS+=" --no-build-info"
-        fi
-
-        if [ $BUILD_TYPE_FLAG -eq 1 ]; then
-           EXTRA_RPM_FLAGS+=" --$BUILD_TYPE"
-        fi
-
-        if [ $ALL -eq 1 ]; then
-           build-rpms-serial --clean  $EXTRA_RPM_FLAGS
-           rm -f $BUILD_ROOT/repo
-        else
-           build-rpms-serial --clean $EXTRA_RPM_FLAGS $TARGETS
-        fi
-    fi
-fi
-
-# create a build info
-if [ $CLEAN_FLAG -eq 0 ] && [ $EDIT_FLAG -eq 0 ] && [ $NO_BUILD_INFO -eq 0 ]; then
-    set_build_info
-fi
-
-for GIT_ROOT in $GIT_LIST; do
-   export GIT_BASE="$GIT_ROOT"
-   if echo "$GIT_ROOT" | grep -q do-not-build; then
-       continue
-   fi
-
-   for p in $(sed 's/#.*//' $GIT_ROOT/$PKG_DIRS_FILE 2>> /dev/null); do
-      src_dir="$GIT_ROOT/$p"
-      if [ -d $src_dir ]; then
-         if [ -d $src_dir/${DISTRO} ]; then
-            rel_dir=$(echo $src_dir | sed "s:^$SRC_BASE::")
-            work_dir="$BUILD_INPUTS$rel_dir"
-            build_dir $src_dir $work_dir
-            RC=$?
-            if [ $RC -ne 0 ]; then
-               if [ $RC -eq 1 ]; then
-                  VERB="build"
-
-                  if [ $EDIT_FLAG -eq 1 ]; then
-                     VERB="edit"
-                     if [ $CLEAN_FLAG -eq 1 ]; then
-                        VERB="edit clean"
-                     fi
-                  fi
-
-                  if [ $CLEAN_FLAG -eq 1 ]; then
-                     VERB="clean"
-                  fi
-                  echo "ERROR: Failed to $VERB src.rpm from source at '$p'"
-               fi
-               exit 1
-            fi
-         else
-            echo "ERROR: $FUNCNAME (${LINENO}): Failed to find 'centos' in '$p', found in file '$GIT_ROOT/$PKG_DIRS_FILE'"
-         fi
-      else
-         echo "ERROR: $FUNCNAME (${LINENO}): Bad path '$p' in file '$GIT_ROOT/$PKG_DIRS_FILE'"
-      fi
-   done
-done
-
-
-AUDIT_DIR=$(mktemp -d $MY_WORKSPACE/tmp-$USER-$ME-audit-XXXXXX)
-
-cleanup() {
-    if [ -n "$AUDIT_DIR" -a -d "$AUDIT_DIR" ]; then
-        echo "clean up $AUDIT_DIR"
-        rm -rf $AUDIT_DIR
-    fi
-}
-
-# cleanup temp files when system exiting
-trap cleanup EXIT
-
-# Try to find and clean orphaned and discontinued .src.rpm's 
-if [ $ALL -eq 1 ]; then
-    echo
-    echo "Auditing for obsolete srpms"
-    for GIT_ROOT in $GIT_LIST; do
-        if echo "$GIT_ROOT" | grep do-not-build; then
-            continue
-        fi
-
-        for p in $(cat $GIT_ROOT/$PKG_DIRS_FILE 2>> /dev/null); do
-            src_dir="$GIT_ROOT/$p"
-            if [ -d $src_dir ]; then
-                if [ -d $src_dir/$DISTRO ]; then
-
-                    for f in $(find $src_dir/${DISTRO} -name '*.spec' | sort -V); do
-                        NAME=$(spec_find_tag Name "$f" 2>> /dev/null)
-                        if [ $? -eq 0 ]; then
-                            touch "$AUDIT_DIR/$NAME"
-                        fi
-                    done
-                    if [ -f $src_dir/$SRPM_LIST_PATH ]; then
-
-                        for p in $(grep -v '^#' $src_dir/$SRPM_LIST_PATH | grep -v '^$'); do
-                           ORIG_SRPM_PATH=""
-                           # absolute path source rpms
-                           echo "$p" | grep "^/" >/dev/null && ORIG_SRPM_PATH=$p
-
-                           if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-                              # handle repo: definitions
-                              echo "$p" | grep "^repo:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^repo:%$REPO_DOWNLOADS_ROOT/%")
-                           fi
-
-                           if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-                              # handle mirror: definitions
-                              echo "$p" | grep "^mirror:" >/dev/null && ORIG_SRPM_PATH=$(echo $p | sed "s%^mirror:%$MIRROR_ROOT/%" | sed "s#CentOS/tis-r3-CentOS/kilo/##" | sed "s#CentOS/tis-r3-CentOS/mitaka/##")
-                           fi
-
-                           if [ "${ORIG_SRPM_PATH}x" == "x" ]; then
-                              # we haven't found a valid prefix yet, so assume it's a legacy
-                              # file (mirror: interpretation)
-                              ORIG_SRPM_PATH="$MIRROR_ROOT/$p"
-                           fi
-
-                           if [ -f $ORIG_SRPM_PATH ]; then
-                               NAME=$(rpm -q --queryformat '%{NAME}\n' -p $ORIG_SRPM_PATH 2>> /dev/null)
-                               if [ $? -eq 0 ]; then
-                                   touch "$AUDIT_DIR/$NAME"
-                               fi
-                           fi
-                        done
-                    fi
-                fi
-            fi
-        done
-    done
-
-    echo "Auditing for obsolete srpms Phase 2"
-    for r in $(find $SRPM_OUT -name '*.src.rpm' | sort -V); do
-        NAME=$(rpm -q --queryformat '%{NAME}\n' -p $r 2>> /dev/null)
-        ALT_NAME=$(echo $NAME | sed "s#-$BUILD_TYPE\$##")
-        FOUND=0
-
-        if [[ -f "$AUDIT_DIR/$NAME" || ( "$BUILD_TYPE" != "std" && -f "$AUDIT_DIR/$ALT_NAME" ) ]]; then
-            FOUND=1
-        fi
-
-        if [ $FOUND -eq 0 ]; then
-            for INPUT_DIR in $(find $BUILD_INPUTS -name $NAME | sort -V); do
-                if [ -d "$INPUT_DIR/rpmbuild/SRPMS" ]; then
-                    clean_srpm_dir "$INPUT_DIR/rpmbuild/SRPMS" 0
-                fi
-                if [ -d $INPUT_DIR ]; then
-                    echo "rm -rf $r"
-                    \rm -rf $r
-                fi
-            done
-            if [ -f $r ]; then
-                \rm -f -v $r
-            fi
-        fi
-    done
-    echo "Delete $AUDIT_DIR"
-    \rm -rf "$AUDIT_DIR"
-fi
-echo "Auditing for obsolete srpms done"
-
-if [ $CLEAN_FLAG -eq 1 ]; then
-    if [ $ALL -eq 1 ]; then
-       \rm -rf $BUILD_INPUTS
-       \rm -rf $SOURCE_OUT/*.src.rpm
-    fi
-fi
-
-if [ $EDIT_FLAG -ne 1 ]; then
-   echo "==== Update repodata ====="
-   mkdir -p $SRPM_OUT/repodata
-   for d in $(find -L $SRPM_OUT -type d -name repodata); do
-      (cd $d/..
-       \rm -rf repodata
-       $CREATEREPO $(pwd)
-       create_lst $(pwd)
-      )
-   done
-   echo "==== Update repodata complete ====="
-fi
-
-FINAL_RC=0
-if [ $CLEAN_FLAG -eq 0 ] && [ $EDIT_FLAG -eq 0 ]; then
-    echo ""
-    if [ "$SRPM_FAILED_REBUILD_LIST" != "" ]; then
-       N=$(echo "$SRPM_FAILED_REBUILD_LIST" | wc -w)
-       echo "Failed to build $N packages:"
-       echo "   $SRPM_FAILED_REBUILD_LIST"
-       FINAL_RC=1
-    fi
-    if [ "$SRPM_REBUILT_LIST" != "" ]; then
-       N=$(echo "$SRPM_REBUILT_LIST" | wc -w)
-       echo "Successfully built $N packages:"
-       echo "   $SRPM_REBUILT_LIST"
-       echo ""
-       echo "Compiled src.rpm's can be found here: $SRPM_OUT"
-    fi
-    if [ "$SRPM_FAILED_REBUILD_LIST" == "" ] && [ "$SRPM_REBUILT_LIST" == "" ]; then
-       echo "No packages required a rebuild"
-    fi
-fi
-
-
-if [ "$UNRESOLVED_TARGETS" != " " ]; then
-    echo ""
-    echo "ERROR: $FUNCNAME (${LINENO}): failed to resolve build targets: $UNRESOLVED_TARGETS"
-    FINAL_RC=1
-fi
-
-exit $FINAL_RC
-) 2>&1 | stdbuf -o0 awk '{ print strftime("%H:%M:%S"), $0; fflush(); }' | tee $(date "+$MY_WORKSPACE/build-srpms-serial_%Y-%m-%d_%H-%M-%S.log") ; exit ${PIPESTATUS[0]}
diff --git a/build-tools/build-wheels/FIXME b/build-tools/build-wheels/FIXME
index 9a0560fe..d9435838 100644
--- a/build-tools/build-wheels/FIXME
+++ b/build-tools/build-wheels/FIXME
@@ -7,9 +7,8 @@ debian/Dockerfile:
 - convert thrifty & nss to wheels and don't install them in Dockerfile
 
 build-wheel-tarball.sh:
-- current DEB wheel packages install wheels at random locations, rather
-  than under /wheels as in CentOS. Fix them and remove the workaround
-  in this script.
+- current DEB wheel packages install wheels at random locations.
+  Fix them and remove the workaround in this script.
 
 build-wheel-tarball.sh:
 - look for wheels in non-Starlingx DEBs. Requires accessing repomgr via
diff --git a/build-tools/build-wheels/build-base-wheels.sh b/build-tools/build-wheels/build-base-wheels.sh
index 0199fa94..963e72b9 100755
--- a/build-tools/build-wheels/build-base-wheels.sh
+++ b/build-tools/build-wheels/build-base-wheels.sh
@@ -20,7 +20,7 @@ fi
 
 KEEP_IMAGE=no
 KEEP_CONTAINER=no
-SUPPORTED_OS_LIST=('centos' 'debian')
+SUPPORTED_OS_LIST=( 'debian' )
 OS=
 OS_VERSION=
 BUILD_STREAM=stable
@@ -38,7 +38,7 @@ Usage:
 $(basename $0) [ --os <os> ] [ --keep-image ] [ --keep-container ] [ --stream <stable|dev> ]
 
 Options:
-    --os:             Override base OS (eg. centos; default: auto)
+    --os:             Override base OS (eg. debian; default: auto)
     --os-version:     Override OS version (default: auto)
     --keep-image:     Skip deletion of the wheel build image in docker
     --keep-container: Skip deletion of container used for the build
diff --git a/build-tools/build-wheels/build-wheel-tarball.sh b/build-tools/build-wheels/build-wheel-tarball.sh
index 8324fd13..3dae69bc 100755
--- a/build-tools/build-wheels/build-wheel-tarball.sh
+++ b/build-tools/build-wheels/build-wheel-tarball.sh
@@ -17,7 +17,7 @@ if [ -z "${MY_WORKSPACE}" -o -z "${MY_REPO}" ]; then
     exit 1
 fi
 
-SUPPORTED_OS_ARGS=('centos' 'debian')
+SUPPORTED_OS_ARGS=( 'debian' )
 OS=
 OS_VERSION=
 BUILD_STREAM=stable
diff --git a/build-tools/build-wheels/centos/Dockerfile b/build-tools/build-wheels/centos/Dockerfile
deleted file mode 100644
index 2122315d..00000000
--- a/build-tools/build-wheels/centos/Dockerfile
+++ /dev/null
@@ -1,46 +0,0 @@
-ARG RELEASE=7.5.1804
-FROM centos:${RELEASE}
-
-ARG BUILD_STREAM=stable
-
-# Install the necessary packages for building the python modules.
-# Some of these are dependencies of the specific modules, and could
-# instead be added to the wheels.cfg file in the future.
-RUN set -ex ;\
-    sed -i '/\[main\]/ atimeout=120' /etc/yum.conf ;\
-    yum makecache ;\
-# nss>3.53.1 causes compile errors with some wheels
-    nss_rpms=$(echo nss nss-util nss-tools nss-sysinit nss-softokn \
-              nss-softokn-devel nss-softokn-freebl nss-devel \
-              nss-util-devel nss-softokn-freebl-devel) ;\
-    # install/upgrade all NSS packages @ v3.53.1
-    yum install -y $(echo $nss_rpms | awk -v RS=' ' '{print $1 "-3.53.1"}') ;\
-    # add "exclude=$nss_rpms" to the CentOS repo file
-    sed -i -r -e "/^\\s*[[]updates[]]/a exclude=$nss_rpms" /etc/yum.repos.d/CentOS-Base.repo ;\
-# install required packages
-    yum install -y epel-release centos-release-openstack-queens ;\
-    yum install -y git gcc zip bzip2 unzip \
-                   python3 python3-pip python3-wheel python3-devel \
-                   wget openldap-devel mariadb mariadb-devel \
-                   libvirt libvirt-devel liberasurecode-devel nss-devel \
-                   systemd-devel postgresql-devel ;\
-# pip<19.2.3 doesn't ignore yanked packages from pypi.org
-    python3 -m pip install pip==19.2.3 ;\
-# setuptools-scm's maintainers keep publishing and yanking new versions.
-# Pin it to latest version known to work
-    python3 -m pip install setuptools-scm==6.0.1 ;\
-# while setuptools is larger than 45.3, it no longer support "Features" in setup.py
-    python3 -m pip install --user setuptools==45.3 ;\
-    python3 -m pip install --user --upgrade wheel
-COPY docker-common/docker-build-wheel.sh /
-COPY centos/${BUILD_STREAM}-wheels.cfg /wheels.cfg
-
-# Python2 packages
-RUN set -ex; \
-    yum -y install python python-devel ;\
-    wget https://bootstrap.pypa.io/pip/2.7/get-pip.py ;\
-    python get-pip.py
-COPY centos/${BUILD_STREAM}-wheels-py2.cfg /wheels-py2.cfg
-
-# root CA cert expired on October 1st, 2021
-RUN yum update -y ca-certificates
diff --git a/build-tools/build-wheels/centos/dev-wheels-py2.cfg b/build-tools/build-wheels/centos/dev-wheels-py2.cfg
deleted file mode 100644
index 86ad94da..00000000
--- a/build-tools/build-wheels/centos/dev-wheels-py2.cfg
+++ /dev/null
@@ -1,20 +0,0 @@
-#
-# git: wheelname|git|git-source|basedir|branch
-# tar: wheelname|tar|wget-source|basedir
-# pypi: wheelname|pypi|wget-source
-# zip: wheelname|zip|wget-source|basedir
-#
-# If fix_setup must be called, add |fix_setup at the end of the line
-#
-# See doc/wheels-cfg.md for more info.
-#
-amqplib-1.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz|amqplib-1.0.2
-lz4-0.9.0-cp27-cp27mu-linux_x86_64.whl|git|https://github.com/python-lz4/python-lz4|python-lz4|v0.9.0
-panko-5.0.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/a9/89/d666e0889d869e41c9b7f87a0a34858b2520782b82e025da84c98e0db8f6/panko-5.0.0.tar.gz|panko-5.0.0
-google_api_python_client-1.7.7-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/d7/47/940908e52487440f61fb93ad55cbbe3a28235d3bb143b26affb17b37dd28/google_api_python_client-1.7.7-py2.py3-none-any.whl
-neutron_lib-*.whl|git|https://github.com/openstack/neutron-lib|neutron-lib|master
-python_openstackclient-*.whl|git|https://github.com/openstack/python-openstackclient|python-openstackclient|master
-openstacksdk-*.whl|git|https://github.com/openstack/openstacksdk|openstacksdk|master
-networking_sfc-8.0.0.0b2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/6a/a8/0e9bdd1f87dfb50682f23a01f590530ec8fa715e51127cf9f58d1905886c/networking_sfc-8.0.0.0b2-py2.py3-none-any.whl
-croniter-0.3.29-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/a9/c9/11182a2507798c661b04a7914739ea8ca73a738e6869a23742029f51bc1a/croniter-0.3.29-py2.py3-none-any.whl
-pecan-1.3.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/93/98/889d7615595e894f4f7e4c17d4008c822c8e39e650c8ab390cc6c39b99c4/pecan-1.3.3.tar.gz|pecan-1.3.3
diff --git a/build-tools/build-wheels/centos/dev-wheels.cfg b/build-tools/build-wheels/centos/dev-wheels.cfg
deleted file mode 100644
index f4c70d18..00000000
--- a/build-tools/build-wheels/centos/dev-wheels.cfg
+++ /dev/null
@@ -1,21 +0,0 @@
-#
-# git: wheelname|git|git-source|basedir|branch
-# tar: wheelname|tar|wget-source|basedir
-# pypi: wheelname|pypi|wget-source
-# zip: wheelname|zip|wget-source|basedir
-#
-# If fix_setup must be called, add |fix_setup at the end of the line
-#
-# See doc/wheels-cfg.md for more info.
-#
-amqplib-1.0.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz|amqplib-1.0.2
-croniter-0.3.29-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/a9/c9/11182a2507798c661b04a7914739ea8ca73a738e6869a23742029f51bc1a/croniter-0.3.29-py2.py3-none-any.whl
-google_api_python_client-1.7.7-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/d7/47/940908e52487440f61fb93ad55cbbe3a28235d3bb143b26affb17b37dd28/google_api_python_client-1.7.7-py2.py3-none-any.whl
-lz4-0.9.0-cp36-cp36m-linux_x86_64.whl|git|https://github.com/python-lz4/python-lz4|python-lz4|v0.9.0
-networking_sfc-8.0.0.0b2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/6a/a8/0e9bdd1f87dfb50682f23a01f590530ec8fa715e51127cf9f58d1905886c/networking_sfc-8.0.0.0b2-py2.py3-none-any.whl
-neutron_lib-*.whl|git|https://github.com/openstack/neutron-lib|neutron-lib|master
-python_openstackclient-*.whl|git|https://github.com/openstack/python-openstackclient|python-openstackclient|master
-openstacksdk-*.whl|git|https://github.com/openstack/openstacksdk|openstacksdk|master
-panko-5.0.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a9/89/d666e0889d869e41c9b7f87a0a34858b2520782b82e025da84c98e0db8f6/panko-5.0.0.tar.gz|panko-5.0.0
-pecan-1.3.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/93/98/889d7615595e894f4f7e4c17d4008c822c8e39e650c8ab390cc6c39b99c4/pecan-1.3.3.tar.gz|pecan-1.3.3
-
diff --git a/build-tools/build-wheels/centos/openstack.cfg b/build-tools/build-wheels/centos/openstack.cfg
deleted file mode 100644
index 8af22102..00000000
--- a/build-tools/build-wheels/centos/openstack.cfg
+++ /dev/null
@@ -1,13 +0,0 @@
-# This file specifies constraint/requirement URLs for current and python2
-# openstack branches
-
-# Current/stable
-STABLE_OPENSTACK_REQ_URL="https://raw.githubusercontent.com/openstack/requirements/stable/ussuri"
-# Current/experimental (for dev images)
-MASTER_OPENSTACK_REQ_URL="https://raw.githubusercontent.com/openstack/requirements/master"
-
-# Python2/stable
-STABLE_OPENSTACK_REQ_URL_PY2="https://opendev.org/openstack/requirements/raw/commit/2da5c5045118b0e36fb14427872e4b9b37335071"
-# Python2/experimental (for dev images)
-MASTER_OPENSTACK_REQ_URL_PY2="https://raw.githubusercontent.com/openstack/requirements/stable/train"
-
diff --git a/build-tools/build-wheels/centos/stable-wheels-py2.cfg b/build-tools/build-wheels/centos/stable-wheels-py2.cfg
deleted file mode 100644
index 011c6575..00000000
--- a/build-tools/build-wheels/centos/stable-wheels-py2.cfg
+++ /dev/null
@@ -1,178 +0,0 @@
-#
-# git: wheelname|git|git-source|basedir|branch
-# tar: wheelname|tar|wget-source|basedir
-# pypi: wheelname|pypi|wget-source
-# zip: wheelname|zip|wget-source|basedir
-#
-# If fix_setup must be called, add |fix_setup at the end of the line
-#
-# See doc/wheels-cfg.md for more info.
-#
-abclient-0.2.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/49/eb/091b02c1e36d68927adfb746706e2c80f7e7bfb3f16e3cbcfec2632118ab/abclient-0.2.3.tar.gz|abclient-0.2.3
-alembic-1.1.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/9a/0f/a5e8997d58882da8ecd288360dddf133a83145de6480216774923b393422/alembic-1.1.0.tar.gz|alembic-1.1.0
-amqplib-1.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz|amqplib-1.0.2
-anyjson-0.3.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz|anyjson-0.3.3
-backports.ssl_match_hostname-3.7.0.1-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ff/2b/8265224812912bc5b7a607c44bf7b027554e1b9775e9ee0de8032e3de4b2/backports.ssl_match_hostname-3.7.0.1.tar.gz|backports.ssl_match_hostname-3.7.0.1|fix_setup
-bottle-0.12.17-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/c4/a5/6bf41779860e9b526772e1b3b31a65a22bd97535572988d16028c5ab617d/bottle-0.12.17.tar.gz|bottle-0.12.17
-cassandra_driver-3.19.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/1c/fe/e4df42a3e864b6b7b2c7f6050b66cafc7fba8b46da0dfb9d51867e171a77/cassandra-driver-3.19.0.tar.gz|cassandra-driver-3.19.0
-cmd2-0.8.9-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/e9/40/a71caa2aaff10c73612a7106e2d35f693e85b8cf6e37ab0774274bca3cf9/cmd2-0.8.9-py2.py3-none-any.whl
-construct-2.8.22-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/e5/c6/3e3aeef38bb0c27364af3d21493d9690c7c3925f298559bca3c48b7c9419/construct-2.8.22.tar.gz|construct-2.8.22
-crc16-0.1.1-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/a6/e0/70a44c4385f2b33df82e518005aae16b5c1feaf082c73c0acebe3426fc0a/crc16-0.1.1.tar.gz|crc16-0.1.1|fix_setup
-demjson-2.2.4-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/96/67/6db789e2533158963d4af689f961b644ddd9200615b8ce92d6cad695c65a/demjson-2.2.4.tar.gz|demjson-2.2.4|fix_setup
-django_floppyforms-1.7.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/8c/18/30a9137c7ae279a27ccdeb10f6fe8be18ee98551d01ec030b6cfe8b2d2e2/django-floppyforms-1.7.0.tar.gz|django-floppyforms-1.7.0
-django_pyscss-2.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/4b/7f/d771802305184aac6010826f60a0b2ecaa3f57d19ab0e405f0c8db07e809/django-pyscss-2.0.2.tar.gz|django-pyscss-2.0.2
-docopt-0.6.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz|docopt-0.6.2
-dogpile.cache-0.7.1-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/84/3e/dbf1cfc5228f1d3dca80ef714db2c5aaec5cd9efaf54d7e3daef6bc48b19/dogpile.cache-0.7.1.tar.gz|dogpile.cache-0.7.1
-enum_compat-0.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/95/6e/26bdcba28b66126f66cf3e4cd03bcd63f7ae330d29ee68b1f6b623550bfa/enum-compat-0.0.2.tar.gz|enum-compat-0.0.2
-etcd3-0.10.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/09/f1/93603a26daf7a993a0acbbcfd32afce8b2fdf30a765d5651571ab635969b/etcd3-0.10.0.tar.gz|etcd3-0.10.0
-exabgp-4.1.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/b9/f1/f2417bc82c9caa220fcd369a3b55ac895088bcc8afc262e4bb07d48aa40c/exabgp-4.1.2.tar.gz|exabgp-4.1.2
-flask_keystone-0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/1f/ca/3938de8c5f4a3d1c5dd4278bedb9d31d79816feba4d088293c620a366fb1/flask_keystone-0.2.tar.gz|flask_keystone-0.2
-flask_oslolog-0.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/a7/62/fec02ce761b548b1289680bb1be1aa0bce2b2c4017d5b31bd6c67c78aef9/flask_oslolog-0.1.tar.gz|flask_oslolog-0.1
-fortiosclient-0.0.3-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e9/aa/b2c0705d5e52c8d9af35422d940800b49c562758fbdad3179a6fbf6e92f5/fortiosclient-0.0.3.tar.gz|fortiosclient-0.0.3
-frozendict-1.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/4e/55/a12ded2c426a4d2bee73f88304c9c08ebbdbadb82569ebdd6a0c007cfd08/frozendict-1.2.tar.gz|frozendict-1.2
-funcparserlib-0.3.6-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/cb/f7/b4a59c3ccf67c0082546eaeb454da1a6610e924d2e7a2a21f337ecae7b40/funcparserlib-0.3.6.tar.gz|funcparserlib-0.3.6
-functools32-3.2.3.post2-py2-none-any.whl|git|https://github.com/MiCHiLU/python-functools32|python-functools32|3.2.3-2|fix_setup
-future-0.17.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/90/52/e20466b85000a181e1e144fd8305caf2cf475e2f9674e797b222f8105f5f/future-0.17.1.tar.gz|future-0.17.1
-happybase-1.2.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/d1/9c/f5f7bdb5439cda2b7da4e20ac24ec0e2455fd68aade8397f211d2994c39d/happybase-1.2.0.tar.gz|happybase-1.2.0
-hiredis-1.0.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/9e/e0/c160dbdff032ffe68e4b3c576cba3db22d8ceffc9513ae63368296d1bcc8/hiredis-1.0.0.tar.gz|hiredis-1.0.0
-httplib2-0.13.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/78/23/bb9606e87a66fd8c72a2b1a75b049d3859a122bc2648915be845bc44e04f/httplib2-0.13.1.tar.gz|httplib2-0.13.1
-itsdangerous-1.1.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/76/ae/44b03b253d6fade317f32c24d100b3b35c2239807046a4c953c7b89fa49e/itsdangerous-1.1.0-py2.py3-none-any.whl
-jaeger_client-4.1.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/f1/da/569a4f1bc3d0c412c7f903053f09ef62fa10949374ca90bc852b22dd3860/jaeger-client-4.1.0.tar.gz|jaeger-client-4.1.0
-jsonpath_rw-1.4.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/71/7c/45001b1f19af8c4478489fbae4fc657b21c4c669d7a5a036a86882581d85/jsonpath-rw-1.4.0.tar.gz|jsonpath-rw-1.4.0
-krest-1.3.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/fb/d2/9dbbd3a76f2385041720a0eb51ddab676e688fa8bee8a1489470839616cf/krest-1.3.1.tar.gz|krest-1.3.1
-#libvirt_python-4.4.0-cp27-none-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/2b/8d/1160cf34dc3d296896eb5c8f4944439ea368b87d2d2431f58d08d6bdf374/libvirt-python-4.4.0.tar.gz|libvirt-python-4.4.0|fix_setup
-logutils-0.3.5-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/49/b2/b57450889bf73da26027f8b995fd5fbfab258ec24ef967e4c1892f7cb121/logutils-0.3.5.tar.gz|logutils-0.3.5|fix_setup
-lz4-0.9.0-cp27-cp27mu-linux_x86_64.whl|git|https://github.com/python-lz4/python-lz4|python-lz4|v0.9.0
-Mako-1.1.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/b0/3c/8dcd6883d009f7cae0f3157fb53e9afb05a0d3d33b3db1268ec2e6f4a56b/Mako-1.1.0.tar.gz|Mako-1.1.0
-marathon-0.11.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/97/e3/f036af0d94f98d199233faa71b5bcbef8b8e8e634551940d98c95d276e4f/marathon-0.11.0-py2.py3-none-any.whl
-MarkupSafe-1.1.1-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz|MarkupSafe-1.1.1
-mox-0.5.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/0c/a1/64740c638cc5fae807022368f4141700518ee343b53eb3e90bf3cc15a4d4/mox-0.5.3.tar.gz|mox-0.5.3|fix_setup
-migrate-0.3.8-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/ce/31/1a4cbf8dc0536c55f41072e8ea37b3df1e412262dc731c57e5bb099eb9b2/migrate-0.3.8.tar.gz|migrate-0.3.8
-mpmath-1.1.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/ca/63/3384ebb3b51af9610086b23ea976e6d27d6d97bf140a76a365bd77a3eb32/mpmath-1.1.0.tar.gz|mpmath-1.1.0|fix_setup
-msgpack_python-0.4.8-cp27-cp27mu-linux_x86_64.whl|git|https://github.com/msgpack/msgpack-python.git|msgpack-python|0.4.8
-munch-2.3.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/68/f4/260ec98ea840757a0da09e0ed8135333d59b8dfebe9752a365b04857660a/munch-2.3.2.tar.gz|munch-2.3.2
-ndg_httpsclient-0.5.1-py2-none-any.whl|pypi|https://files.pythonhosted.org/packages/bf/b2/26470fde7ff55169df8e071fb42cb1f83e22bd952520ab2b5c5a5edc2acd/ndg_httpsclient-0.5.1-py2-none-any.whl
-netifaces-0.10.9-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/0d/18/fd6e9c71a35b67a73160ec80a49da63d1eed2d2055054cc2995714949132/netifaces-0.10.9.tar.gz|netifaces-0.10.9
-networking_sfc-8.0.0.0b2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/6a/a8/0e9bdd1f87dfb50682f23a01f590530ec8fa715e51127cf9f58d1905886c/networking_sfc-8.0.0.0b2-py2.py3-none-any.whl
-networkx-2.2-py2.py3-none-any.whl|zip|https://files.pythonhosted.org/packages/f3/f4/7e20ef40b118478191cec0b58c3192f822cace858c19505c7670961b76b2/networkx-2.2.zip|networkx-2.2
-neutron_lib-1.29.1-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/6b/dd/548cbb7a936de18aa642372927e409540d8f5d96a2f7650c4d1197845f3c/neutron_lib-1.29.1-py2.py3-none-any.whl
-nodeenv-1.3.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/00/6e/ed417bd1ed417ab3feada52d0c89ab0ed87d150f91590badf84273e047c9/nodeenv-1.3.3.tar.gz|nodeenv-1.3.3
-nose_exclude-0.5.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/63/cf/90c4be56bf11b7bc8801086d9445baf731aa36b8e8fc5791731e8e604dcd/nose-exclude-0.5.0.tar.gz|nose-exclude-0.5.0
-nosehtmloutput-0.0.5-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/0c/f7/6cb16c0b233d3f2d62be38ddb7d7c1bc967188c41575ecf0312e6575730d/nosehtmloutput-0.0.5.tar.gz|nosehtmloutput-0.0.5
-openshift-0.8.6-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/73/ed/c92c0ba23b6c4c8e5542151a1b89cb8ff01f68a72fe68f6c95a28d885ebe/openshift-0.8.6.tar.gz|openshift-0.8.6
-openstack.nose_plugin-0.11-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/bc/83/e7c9b9297e1a501d2c2617f98d6176199570e8ee32f0e72669c8852c6c81/openstack.nose_plugin-0.11.tar.gz|openstack.nose_plugin-0.11
-opentracing-2.2.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/94/9f/289424136addf621fb4c75624ef9a3a80e8575da3993a87950c57e93217e/opentracing-2.2.0.tar.gz|opentracing-2.2.0
-ovs-2.11.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/81/06/387b2159ac073de95e484aa6e2f108a232cd906e350307168843061f899f/ovs-2.11.0.tar.gz|ovs-2.11.0
-panko-5.0.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/a9/89/d666e0889d869e41c9b7f87a0a34858b2520782b82e025da84c98e0db8f6/panko-5.0.0.tar.gz|panko-5.0.0
-pathlib-1.0.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/ac/aa/9b065a76b9af472437a0059f77e8f962fe350438b927cb80184c32f075eb/pathlib-1.0.1.tar.gz|pathlib-1.0.1|fix_setup
-pecan-1.3.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/93/98/889d7615595e894f4f7e4c17d4008c822c8e39e650c8ab390cc6c39b99c4/pecan-1.3.3.tar.gz|pecan-1.3.3
-pifpaf-2.2.2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/33/dc/4f276c55d94cd73fc1f94e2d23f34b476fea38d240e3e17b837a5749bc9f/pifpaf-2.2.2-py2.py3-none-any.whl
-pika_pool-0.1.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/ec/48/50c8f02a3eef4cb824bec50661ec1713040402cc1b2a38954dc977a59c23/pika-pool-0.1.3.tar.gz|pika-pool-0.1.3
-Pint-0.9-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/15/9d/bf177ebbc57d25e9e296addc14a1303d1e34d7964af5df428a8332349c42/Pint-0.9-py2.py3-none-any.whl
-ply-3.11-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl
-positional-1.1.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/8c/16/64a4fa0967c486380468dca18867d22ac1c17bba06349e31ace77c7757f7/positional-1.1.2.tar.gz|positional-1.1.2
-prettytable-0.7.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/e0/a1/36203205f77ccf98f3c6cf17cf068c972e6458d7e58509ca66da949ca347/prettytable-0.7.2.tar.gz|prettytable-0.7.2
-proboscis-1.2.6.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/3c/c8/c187818ab8d0faecdc3c16c1e0b2e522f3b38570f0fb91dcae21662019d0/proboscis-1.2.6.0.tar.gz|proboscis-1.2.6.0
-psutil-5.6.3-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/1c/ca/5b8c1fe032a458c2c4bcbe509d1401dca9dda35c7fc46b36bb81c2834740/psutil-5.6.3.tar.gz|psutil-5.6.3
-psycopg2-2.8.3-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/5c/1c/6997288da181277a0c29bc39a5f9143ff20b8c99f2a7d059cfb55163e165/psycopg2-2.8.3.tar.gz|psycopg2-2.8.3
-PuLP-1.6.10-py2-none-any.whl|zip|https://files.pythonhosted.org/packages/2d/33/3ae6d9d2ac8c7068937af6372fd8828ac605e62a8b17106fe57110930d38/PuLP-1.6.10.zip|PuLP-1.6.10
-pycparser-2.19-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz|pycparser-2.19
-pycrypto-2.6.1-cp27-cp27mu-linux_x86_64.whl|git|https://github.com/dlitz/pycrypto|pycrypto|v2.6.1|fix_setup
-pycryptodomex-3.9.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e4/90/a01cafbbad7466491e3a630bf1d734294a32ff1b10e7429e9a4e8478669e/pycryptodomex-3.9.0.tar.gz|pycryptodomex-3.9.0
-pydot-1.4.1-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/33/d1/b1479a770f66d962f545c2101630ce1d5592d90cb4f083d38862e93d16d2/pydot-1.4.1-py2.py3-none-any.whl
-pydotplus-2.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/60/bf/62567830b700d9f6930e9ab6831d6ba256f7b0b730acb37278b0ccdffacf/pydotplus-2.0.2.tar.gz|pydotplus-2.0.2
-pyeclib-1.6.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/aa/d6/ca6bba5e66fc7a9810a995b17a3675492da2bec405806d8ac3db18cfd93b/pyeclib-1.6.0.tar.gz|pyeclib-1.6.0|fix_setup
-pyinotify-0.9.6-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/e3/c0/fd5b18dde17c1249658521f69598f3252f11d9d7a980c5be8619970646e1/pyinotify-0.9.6.tar.gz|pyinotify-0.9.6
-pykerberos-1.2.1-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/9a/b8/1ec56b6fa8a2e2a81420bd3d90e70b59fc83f6b857fb2c2c37accddc8be3/pykerberos-1.2.1.tar.gz|pykerberos-1.2.1
-PyKMIP-0.9.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/24/b2/258332aea85163f49a187337e8c85ee4529eb499b84fe0a6fe2d1a9c8d25/PyKMIP-0.9.1.tar.gz|PyKMIP-0.9.1
-pylxd-2.2.10-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/49/9a/eba58646721ffbff40dc41571b13c9528fdc4e26a82252318c997cdbe26a/pylxd-2.2.10.tar.gz|pylxd-2.2.10
-pyngus-2.3.0-py2-none-any.whl|zip|https://files.pythonhosted.org/packages/58/b1/336b8f64e7e4efa9b95027af71e02cd4cfacca8f919345badb852381878a/pyngus-2.3.0.zip|pyngus-2.3.0
-pyperclip-1.7.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/2d/0f/4eda562dffd085945d57c2d9a5da745cfb5228c02bc90f2c74bbac746243/pyperclip-1.7.0.tar.gz|pyperclip-1.7.0
-pyroute2-0.5.6-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/f6/80/16a604075345f0c253537d55e5c5282a37c61a1fc8ee0fcc42d1fd2a0739/pyroute2-0.5.6.tar.gz|pyroute2-0.5.6|fix_setup
-pyrsistent-0.15.4-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/b9/66/b2638d96a2d128b168d0dba60fdc77b7800a9b4a5340cefcc5fc4eae6295/pyrsistent-0.15.4.tar.gz|pyrsistent-0.15.4
-pyScss-1.3.4-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/1d/4a/221ae7561c8f51c4f28b2b172366ccd0820b14bb947350df82428dfce381/pyScss-1.3.4.tar.gz|pyScss-1.3.4
-pysendfile-2.0.1-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/cd/3f/4aa268afd0252f06b3b487c296a066a01ddd4222a46b7a3748599c8fc8c3/pysendfile-2.0.1.tar.gz|pysendfile-2.0.1
-pystache-0.5.4-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/d6/fd/eb8c212053addd941cc90baac307c00ac246ac3fce7166b86434c6eae963/pystache-0.5.4.tar.gz|pystache-0.5.4
-python_cinderclient-4.3.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/f1/09/760c454c5bf67509d7f8479d583a3e84411f51ec2a1942aea3741a49b090/python_cinderclient-4.3.0-py2.py3-none-any.whl
-python_consul-1.1.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/3f/d0/59bc5f1c6c4d4b498c41d8ce7052ee9e9d68be19e16038a55252018a6c4d/python_consul-1.1.0-py2.py3-none-any.whl
-python_editor-1.0.4-py2-none-any.whl|pypi|https://files.pythonhosted.org/packages/55/a0/3c0ba1c10f2ca381645dd46cb7afbb73fddc8de9f957e1f9e726a846eabc/python_editor-1.0.4-py2-none-any.whl
-python_etcd-0.4.5-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/a1/da/616a4d073642da5dd432e5289b7c1cb0963cc5dde23d1ecb8d726821ab41/python-etcd-0.4.5.tar.gz|python-etcd-0.4.5
-python_ldap-3.2.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz|python-ldap-3.2.0
-python_memcached-1.59-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/f5/90/19d3908048f70c120ec66a39e61b92c253e834e6e895cd104ce5e46cbe53/python_memcached-1.59-py2.py3-none-any.whl
-python_nss-1.0.1-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/6b/29/629098e34951c358b1f04f13a70b3590eb0cf2df817d945bd05c4169d71b/python-nss-1.0.1.tar.bz2|python-nss-1.0.1|fix_setup
-python_pcre-0.7-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/9d/af/61435bd163f01fe3709fca9b1f79e4978d8089ee671d2e004fc85e10de29/python-pcre-0.7.tar.gz|python-pcre-0.7|fix_setup
-python_pytun-2.3.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/52/a4/a062106c739eac79c8160fcf5779ebc84afc1c38b016ab216ed1e6da69b6/python-pytun-2.3.0.tar.gz|python-pytun-2.3.0|fix_setup
-python_qpid_proton-0.28.0-cp27-cp27mu-linux_x86_64.whl|zip|https://files.pythonhosted.org/packages/96/35/2c86d844aec1acdfe7778966994aa270fcf03f076df393003bd4fc07dfa9/python-qpid-proton-0.28.0.zip|python-qpid-proton-0.28.0|fix_setup
-python_string_utils-0.6.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/5d/13/216f2d4a71307f5a4e5782f1f59e6e8e5d6d6c00eaadf9f92aeccfbb900c/python-string-utils-0.6.0.tar.gz|python-string-utils-0.6.0|fix_setup
-pyudev-0.21.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/bc/a2/31a07829acea8e70a28c247f43fa5d981229ae0f9edfeddedf52de00709b/pyudev-0.21.0.tar.gz|pyudev-0.21.0
-PyYAML-5.1.2-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e3/e8/b3212641ee2718d556df0f23f78de8303f068fe29cdaa7a91018849582fe/PyYAML-5.1.2.tar.gz|PyYAML-5.1.2
-pyzabbix-0.7.5-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/11/ad/24e19d0cf16d05b7ee19f337f02058ee9b760649171865469ccceef83027/pyzabbix-0.7.5.tar.gz|pyzabbix-0.7.5
-qpid_python-1.36.0.post1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/2a/33/026ac50a29a85d5d54dd7784a98d624f6142cb07ce185ed268ef9bd3b6dc/qpid-python-1.36.0-1.tar.gz|qpid-python-1.36.0-1|fix_setup
-rcssmin-1.0.6-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e2/5f/852be8aa80d1c24de9b030cdb6532bc7e7a1c8461554f6edbe14335ba890/rcssmin-1.0.6.tar.gz|rcssmin-1.0.6|fix_setup
-repoze.lru-0.7-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz|repoze.lru-0.7
-requests_aws-0.1.8-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/5e/2f/4da17752036c04cf4c9af7a2da0d41ef2205043f1c61008006475aa24b8b/requests-aws-0.1.8.tar.gz|requests-aws-0.1.8
-restructuredtext_lint-1.3.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/62/76/bd8760de759fb74d7863e6935200af101cb128a7de008741a4e22341d03c/restructuredtext_lint-1.3.0.tar.gz|restructuredtext_lint-1.3.0
-retrying-1.3.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/44/ef/beae4b4ef80902f22e3af073397f079c96969c69b2c7d52a57ea9ae61c9d/retrying-1.3.3.tar.gz|retrying-1.3.3
-rfc3986-1.4.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/78/be/7b8b99fd74ff5684225f50dd0e865393d2265656ef3b4ba9eaaaffe622b8/rfc3986-1.4.0-py2.py3-none-any.whl
-rjsmin-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl|pypi|https://files.pythonhosted.org/packages/c3/8e/079b7cc3a0fc9934ab05d868a00183c7aafd90b5d6138313d98ac2b9f666/rjsmin-1.1.0-cp27-cp27mu-manylinux1_x86_64.whl
-rtslib_fb-2.1.69-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/fc/1a/77a26207bdad13cc39b93d874b3a1b04e5a0b0332fb716e4d654537bacdb/rtslib-fb-2.1.69.tar.gz|rtslib-fb-2.1.69
-scandir-1.10.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz|scandir-1.10.0
-scrypt-0.8.13-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/80/3d/141eb80e754b86f6c25a2ffaf6c3af3acdb65a3e3700829a05ab0c5d965d/scrypt-0.8.13.tar.gz|scrypt-0.8.13
-SecretStorage-2.3.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/a5/a5/0830cfe34a4cfd0d1c3c8b614ede1edb2aaf999091ac8548dd19cb352e79/SecretStorage-2.3.1.tar.gz|SecretStorage-2.3.1
-setproctitle-1.1.10-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz|setproctitle-1.1.10
-simplegeneric-0.8.1-py2-none-any.whl|zip|https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip|simplegeneric-0.8.1
-simplejson-3.16.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e3/24/c35fb1c1c315fc0fffe61ea00d3f88e85469004713dab488dee4f35b0aff/simplejson-3.16.0.tar.gz|simplejson-3.16.0
-skydive_client-0.5.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/e4/68/78a246619d9b16bb226562c155f18f798283f86db8f01a89c30b97ac7a27/skydive-client-0.5.0.tar.gz|skydive-client-0.5.0
-smmap2-2.0.5-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/55/d2/866d45e3a121ee15a1dc013824d58072fd5c7799c9c34d01378eb262ca8f/smmap2-2.0.5-py2.py3-none-any.whl
-sphinxcontrib_fulltoc-1.2.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/8e/a6/d1297db9b75650681e5429e92e13df139ee6b64303ff1b2eea4ebd32c0a9/sphinxcontrib-fulltoc-1.2.0.tar.gz|sphinxcontrib-fulltoc-1.2.0
-sphinxcontrib_pecanwsme-0.10.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/75/2b/105d07f47485ecf774cd80b881c29e148182b72a3a60596abdd016c87fce/sphinxcontrib-pecanwsme-0.10.0.tar.gz|sphinxcontrib-pecanwsme-0.10.0
-SQLAlchemy-1.3.8-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/fc/49/82d64d705ced344ba458197dadab30cfa745f9650ee22260ac2b275d288c/SQLAlchemy-1.3.8.tar.gz|SQLAlchemy-1.3.8
-SQLAlchemy_Utils-0.34.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/45/61/3bdd2931e86253fa7df6445a26929fbcc9bc43ad6b27a10f991eb6ecde75/SQLAlchemy-Utils-0.34.2.tar.gz|SQLAlchemy-Utils-0.34.2
-stomp.py-4.1.22-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/52/7e/22ca617f61e0d5904e06c1ebd5d453adf30099526c0b64dca8d74fff0cad/stomp.py-4.1.22.tar.gz|stomp.py-4.1.22
-subprocess32-3.5.4-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz|subprocess32-3.5.4
-suds_jurko-0.6-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/bd/6f/54fbf0999a606680d27c69b1ad12dfff62768ecb9fe48524cebda6eb4423/suds-jurko-0.6.tar.bz2|suds-jurko-0.6
-systemd_python-234-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e8/a8/00ba0f605837a8f69523e6c3a4fb14675a6430c163f836540129c50b3aef/systemd-python-234.tar.gz|systemd-python-234|fix_setup
-sysv_ipc-1.0.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/08/7d/a862f3045fa191eeece23650725273f2ccaf9ac6b95443dfe4cac6508638/sysv_ipc-1.0.0.tar.gz|sysv_ipc-1.0.0|fix_setup
-Tempita-0.5.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz|Tempita-0.5.2
-termcolor-1.1.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz|termcolor-1.1.0|fix_setup
-testrepository-0.0.20-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/0c/85/f495b58b2b0ac907def07385219e9747b75840fa01280f228546a4a5ad7f/testrepository-0.0.20.tar.gz|testrepository-0.0.20
-thrift-0.11.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/c6/b4/510617906f8e0c5660e7d96fbc5585113f83ad547a3989b80297ac72a74c/thrift-0.11.0.tar.gz|thrift-0.11.0
-thriftpy-0.3.9-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/f4/19/cca118cf7d2087310dbc8bd70dc7df0c1320f2652873a93d06d7ba356d4a/thriftpy-0.3.9.tar.gz|thriftpy-0.3.9
-thriftpy2-0.4.8-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/2c/23/57b00b3d5d3d0ae66d79844a39d3c3b92dde3063c901036808602137d3ab/thriftpy2-0.4.8.tar.gz|thriftpy2-0.4.8
-tinyrpc-1.0.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/21/7a/ff1a74256e1bcc04fbaa414c13a2bb79a29ac9918b25f2238592b991e3bc/tinyrpc-1.0.3.tar.gz|tinyrpc-1.0.3
-tornado-4.5.3-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e3/7b/e29ab3d51c8df66922fea216e2bddfcb6430fb29620e5165b16a216e0d3c/tornado-4.5.3.tar.gz|tornado-4.5.3
-trollius-2.2.post1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/0b/31/356ae13ad4df58f963e9954d55118f6cffdb3a903c1547973ad7bc347fb9/trollius-2.2.post1.tar.gz|trollius-2.2.post1
-ujson-1.35-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/16/c4/79f3409bc710559015464e5f49b9879430d8f87498ecdc335899732e5377/ujson-1.35.tar.gz|ujson-1.35
-unicodecsv-0.14.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/6f/a4/691ab63b17505a26096608cc309960b5a6bdf39e4ba1a793d5f9b1a53270/unicodecsv-0.14.1.tar.gz|unicodecsv-0.14.1
-uWSGI-2.0.17.1-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/a2/c9/a2d5737f63cd9df4317a4acc15d1ddf4952e28398601d8d7d706c16381e0/uwsgi-2.0.17.1.tar.gz|uwsgi-2.0.17.1
-voluptuous-0.11.7-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/24/3b/fe531688c0d9e057fccc0bc9430c0a3d4b90e0d2f015326e659c2944e328/voluptuous-0.11.7.tar.gz|voluptuous-0.11.7
-warlock-1.3.3-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/c2/36/178b26a338cd6d30523246da4721b1114306f588deb813f3f503052825ee/warlock-1.3.3.tar.gz|warlock-1.3.3
-weakrefmethod-1.0.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/99/82/73a21e3eab9a1ff76d12375f7301fba5c6325b9598eed0ae5b0cf5243656/weakrefmethod-1.0.3.tar.gz|weakrefmethod-1.0.3
-websockify-0.9.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/c4/5b/16ec1e9f4fc536846d95a01a77d97da12f8042ca5cf83cdf3dd0442e881c/websockify-0.9.0.tar.gz|websockify-0.9.0
-whereto-0.4.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/80/83/371a699ce90257608592dadca400a7ecd9a2db6137d78f6f433c7c5e3197/whereto-0.4.0.tar.gz|whereto-0.4.0
-wrapt-1.11.2-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/23/84/323c2415280bc4fc880ac5050dddfb3c8062c2552b34c2e512eb4aa68f79/wrapt-1.11.2.tar.gz|wrapt-1.11.2|fix_setup
-ws4py-0.5.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz|ws4py-0.5.1
-WSME-0.9.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/d1/b6/8027248bfca3ce192bc54d46fcda4324c86c8beabe344cbb80fb57a6c868/WSME-0.9.3.tar.gz|WSME-0.9.3
-xattr-0.9.6-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/60/80/a1f35bfd3c7ffb78791b2a6a15c233584a102a20547fd96d48933ec453e7/xattr-0.9.6.tar.gz|xattr-0.9.6
-XStatic-1.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/36/78/c0ffaf14216517a14d3daa67ff24fbb60b4703e95ce1059a48fd508e6b8c/XStatic-1.0.2.tar.gz|XStatic-1.0.2
-XStatic_Angular_FileUpload-12.0.4.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/4d/fd/c3051915d2f12e8fa11f59c01162ce85e38eca15d9ec73a3d7b271b49744/XStatic-Angular-FileUpload-12.0.4.0.tar.gz|XStatic-Angular-FileUpload-12.0.4.0
-XStatic_Angular_lrdragndrop-1.0.2.4-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/80/ea/ffdde05892eabe468f22403f75299cf5d991f0af4f1400bebbf3af04bc9a/XStatic_Angular_lrdragndrop-1.0.2.4-py2.py3-none-any.whl
-XStatic_Angular_Schema_Form-0.8.13.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/57/71/ceea2c0a72e2ee2d316d6ab1c06b21faa9f5cbc4b36a4127d7847b7079c5/XStatic-Angular-Schema-Form-0.8.13.0.tar.gz|XStatic-Angular-Schema-Form-0.8.13.0
-XStatic_Bootstrap_Datepicker-1.3.1.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/91/4f/832f14478e714815bb3d44d01dfe8dbe19ccf9f823e0bc7ac1a8cf7fa6b3/XStatic-Bootstrap-Datepicker-1.3.1.0.tar.gz|XStatic-Bootstrap-Datepicker-1.3.1.0
-XStatic_Hogan-2.0.0.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/21/fe/37d5c8247f24738e7e368d27ebf945de1ea29fbc3112ac5e75b1b7f1d0c9/XStatic-Hogan-2.0.0.2.tar.gz|XStatic-Hogan-2.0.0.2
-XStatic_Jasmine-2.4.1.2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/05/43/ceac7def3b6eaf82b6f593e3db2b03a9693a7b002b569e664e382aecddbc/XStatic_Jasmine-2.4.1.2-py2.py3-none-any.whl
-XStatic_jQuery-1.12.4.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/67/f1/c18c14fc4aab386e4aba587c5d10c268de222c75bf5e271b6f68a2ea6e77/XStatic-jQuery-1.12.4.1.tar.gz|XStatic-jQuery-1.12.4.1
-XStatic_JQuery_Migrate-1.2.1.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/7c/fc/edbfcb4574ec3cf0b68a0613dd1904c9139e3bf6dede792d2e7edcf13023/XStatic-JQuery-Migrate-1.2.1.1.tar.gz|XStatic-JQuery-Migrate-1.2.1.1
-XStatic_JQuery.quicksearch-2.0.3.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/ea/ab/f934d06a78ce2c6bb594e9a426f6966b3192c4c279467c9898be6fd284d3/XStatic-JQuery.quicksearch-2.0.3.1.tar.gz|XStatic-JQuery.quicksearch-2.0.3.1
-XStatic_JQuery.TableSorter-2.14.5.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/c1/6c/d6b0807906af90536e793a3b23cca557869fa5a27156639f0029de8b1f1f/XStatic-JQuery.TableSorter-2.14.5.1.tar.gz|XStatic-JQuery.TableSorter-2.14.5.1
-XStatic_jquery_ui-1.12.1.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/e6/5a/883b22dad1d3e01708312d71c5bc63d543d66cef9b448c1cf85379d64fb3/XStatic-jquery-ui-1.12.1.1.tar.gz|XStatic-jquery-ui-1.12.1.1
-XStatic_mdi-1.6.50.2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/73/49/13b9f7ce9fbcc7fabe086b7ac1b056118cbd4c9abf185e01cc4a54631136/XStatic_mdi-1.6.50.2-py2.py3-none-any.whl
-XStatic_objectpath-1.2.1.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/23/6c/56de25d9d3be430e7de2fcf4baac10279dad78d7b16cbda339cf014c2fe5/XStatic-objectpath-1.2.1.0.tar.gz|XStatic-objectpath-1.2.1.0
-XStatic_Rickshaw-1.5.0.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/45/c6/39aa4d02ea96b04ff372d1e3558587155790b1c5444855a97b89c255be38/XStatic-Rickshaw-1.5.0.0.tar.gz|XStatic-Rickshaw-1.5.0.0
-XStatic_Spin-1.2.5.2-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/af/21/cca7f0b7abfe008cdd03dd4c4255aad3087f4a892a010c0f6f1452d7344b/XStatic-Spin-1.2.5.2.tar.gz|XStatic-Spin-1.2.5.2
-XStatic_term.js-0.0.7.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/63/7a/7bfec29f5f28fdda7170ebbbb2204aeb1d33d6050f3476a807590de06434/XStatic-term.js-0.0.7.0.tar.gz|XStatic-term.js-0.0.7.0
-XStatic_tv4-1.2.7.0-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/2b/26/b07115af27b339c861b8c9a775a621524b421c898e26e015880dfb888c49/XStatic-tv4-1.2.7.0.tar.gz|XStatic-tv4-1.2.7.0
-xvfbwrapper-0.2.9-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/57/b6/4920eabda9b49630dea58745e79f9919aba6408d460afe758bf6e9b21a04/xvfbwrapper-0.2.9.tar.gz|xvfbwrapper-0.2.9
-yappi-1.0-cp27-cp27mu-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/d2/92/7cd637a19fa2a10c0e55a44f8b36bcb83f0e1943ba8f1fb5edb15c819f2e/yappi-1.0.tar.gz|yappi-1.0
-zerorpc-0.6.3-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/73/ff/d61ef9f5d10e671421d1368e87d3525325483ebd7da262b1d3087443662b/zerorpc-0.6.3.tar.gz|zerorpc-0.6.3
-zVMCloudConnector-1.4.1-py2-none-any.whl|tar|https://files.pythonhosted.org/packages/11/92/9f704de9759816e7b9897b9fb41285b421498b4642551b6fbcccd2850008/zVMCloudConnector-1.4.1.tar.gz|zVMCloudConnector-1.4.1
diff --git a/build-tools/build-wheels/centos/stable-wheels.cfg b/build-tools/build-wheels/centos/stable-wheels.cfg
deleted file mode 100644
index d0b60395..00000000
--- a/build-tools/build-wheels/centos/stable-wheels.cfg
+++ /dev/null
@@ -1,183 +0,0 @@
-#
-# git: wheelname|git|git-source|basedir|branch
-# tar: wheelname|tar|wget-source|basedir
-# pypi: wheelname|pypi|wget-source
-# zip: wheelname|zip|wget-source|basedir
-#
-# If fix_setup must be called, add |fix_setup at the end of the line
-#
-# See doc/wheels-cfg.md for more info.
-#
-abclient-0.2.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/49/eb/091b02c1e36d68927adfb746706e2c80f7e7bfb3f16e3cbcfec2632118ab/abclient-0.2.3.tar.gz|abclient-0.2.3
-alembic-1.4.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/60/1e/cabc75a189de0fbb2841d0975243e59bde8b7822bacbb95008ac6fe9ad47/alembic-1.4.2.tar.gz|alembic-1.4.2
-amqplib-1.0.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/75/b7/8c2429bf8d92354a0118614f9a4d15e53bc69ebedce534284111de5a0102/amqplib-1.0.2.tgz|amqplib-1.0.2
-anyjson-0.3.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/c3/4d/d4089e1a3dd25b46bebdb55a992b0797cff657b4477bc32ce28038fdecbc/anyjson-0.3.3.tar.gz|anyjson-0.3.3
-backports.ssl_match_hostname-3.7.0.1-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ff/2b/8265224812912bc5b7a607c44bf7b027554e1b9775e9ee0de8032e3de4b2/backports.ssl_match_hostname-3.7.0.1.tar.gz|backports.ssl_match_hostname-3.7.0.1|fix_setup
-bottle-0.12.18-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/e9/39/2bf3a1fd963e749cdbe5036a184eda8c37d8af25d1297d94b8b7aeec17c4/bottle-0.12.18-py3-none-any.whl
-cassandra_driver-3.23.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/90/d7/d68083117bf50941870a795150f3261c5270e74c2d57ca3af0bd8423ed74/cassandra-driver-3.23.0.tar.gz|cassandra-driver-3.23.0
-cmd2-0.8.9-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/e9/40/a71caa2aaff10c73612a7106e2d35f693e85b8cf6e37ab0774274bca3cf9/cmd2-0.8.9-py2.py3-none-any.whl
-construct-2.8.22-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e5/c6/3e3aeef38bb0c27364af3d21493d9690c7c3925f298559bca3c48b7c9419/construct-2.8.22.tar.gz|construct-2.8.22
-crc16-0.1.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/a6/e0/70a44c4385f2b33df82e518005aae16b5c1feaf082c73c0acebe3426fc0a/crc16-0.1.1.tar.gz|crc16-0.1.1|fix_setup
-demjson-2.2.4-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/96/67/6db789e2533158963d4af689f961b644ddd9200615b8ce92d6cad695c65a/demjson-2.2.4.tar.gz|demjson-2.2.4|fix_setup
-django_debreach-2.0.1-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/2a/92/8c363cf5d1ee33d4c3b999b41c127c5cd3c64d4c20aa47bdfb6c386c9309/django_debreach-2.0.1-py3-none-any.whl
-django_floppyforms-1.8.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/a9/d2/498b883ac309b56b70c26877974bd50927615dd3f6433f5463e2668b1128/django_floppyforms-1.8.0-py2.py3-none-any.whl
-django_pyscss-2.0.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/4b/7f/d771802305184aac6010826f60a0b2ecaa3f57d19ab0e405f0c8db07e809/django-pyscss-2.0.2.tar.gz|django-pyscss-2.0.2
-docopt-0.6.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz|docopt-0.6.2
-dogpile.cache-0.9.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ac/6a/9ac405686a94b7f009a20a50070a5786b0e1aedc707b88d40d0c4b51a82e/dogpile.cache-0.9.0.tar.gz|dogpile.cache-0.9.0
-enum_compat-0.0.3-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/55/ae/467bc4509246283bb59746e21a1a2f5a8aecbef56b1fa6eaca78cd438c8b/enum_compat-0.0.3-py3-none-any.whl
-etcd3-0.10.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/09/f1/93603a26daf7a993a0acbbcfd32afce8b2fdf30a765d5651571ab635969b/etcd3-0.10.0.tar.gz|etcd3-0.10.0
-exabgp-4.2.6-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/b6/36/7270c8e4b5b0ddba79301f5bbf206ce4b76247957169162b428e2695efa9/exabgp-4.2.6.tar.gz|exabgp-4.2.6
-flask_keystone-0.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/1f/ca/3938de8c5f4a3d1c5dd4278bedb9d31d79816feba4d088293c620a366fb1/flask_keystone-0.2.tar.gz|flask_keystone-0.2
-flask_oslolog-0.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a7/62/fec02ce761b548b1289680bb1be1aa0bce2b2c4017d5b31bd6c67c78aef9/flask_oslolog-0.1.tar.gz|flask_oslolog-0.1
-fortiosclient-0.0.3-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e9/aa/b2c0705d5e52c8d9af35422d940800b49c562758fbdad3179a6fbf6e92f5/fortiosclient-0.0.3.tar.gz|fortiosclient-0.0.3
-frozendict-1.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/4e/55/a12ded2c426a4d2bee73f88304c9c08ebbdbadb82569ebdd6a0c007cfd08/frozendict-1.2.tar.gz|frozendict-1.2
-future-0.18.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/45/0b/38b06fd9b92dc2b68d58b75f900e97884c45bedd2ff83203d933cf5851c9/future-0.18.2.tar.gz|future-0.18.2
-googleapis_common_protos-1.51.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/05/46/168fd780f594a4d61122f7f3dc0561686084319ad73b4febbf02ae8b32cf/googleapis-common-protos-1.51.0.tar.gz|googleapis-common-protos-1.51.0
-happybase-1.2.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/d1/9c/f5f7bdb5439cda2b7da4e20ac24ec0e2455fd68aade8397f211d2994c39d/happybase-1.2.0.tar.gz|happybase-1.2.0
-hiredis-1.0.1-cp36-cp36m-manylinux1_x86_64.whl|pypi|https://files.pythonhosted.org/packages/5b/a3/23bc840f0e2baa4aedb41d90b3196fed3ae88ee43ec60059a0c8f31be4b8/hiredis-1.0.1-cp36-cp36m-manylinux1_x86_64.whl
-httplib2-0.17.2-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/dd/a6/e3d8ae2c5b3a89de9a6b5e1e9396ce41432e08feafe25c37c4dc6b49d79d/httplib2-0.17.2-py3-none-any.whl
-ifaddr-0.1.6-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/9f/54/d92bda685093ebc70e2057abfa83ef1b3fb0ae2b6357262a3e19dfe96bb8/ifaddr-0.1.6.tar.gz|ifaddr-0.1.6
-itsdangerous-1.1.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/76/ae/44b03b253d6fade317f32c24d100b3b35c2239807046a4c953c7b89fa49e/itsdangerous-1.1.0-py2.py3-none-any.whl
-jaeger_client-4.3.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/2b/75/17a937a61135671cebc175ab5c299dc0f7477042469482fd9a6f91262c68/jaeger-client-4.3.0.tar.gz|jaeger-client-4.3.0
-jsonpath_rw-1.4.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/71/7c/45001b1f19af8c4478489fbae4fc657b21c4c669d7a5a036a86882581d85/jsonpath-rw-1.4.0.tar.gz|jsonpath-rw-1.4.0
-krest-1.3.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/fb/d2/9dbbd3a76f2385041720a0eb51ddab676e688fa8bee8a1489470839616cf/krest-1.3.1.tar.gz|krest-1.3.1
-libvirt_python-4.7.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/ad/d7/251c52f937f1e6c6304c4a2ca088a0cfb9ae139c9be5c476e8351d976b4a/libvirt-python-4.7.0.tar.gz|libvirt-python-4.7.0|fix_setup
-logutils-0.3.5-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/49/b2/b57450889bf73da26027f8b995fd5fbfab258ec24ef967e4c1892f7cb121/logutils-0.3.5.tar.gz|logutils-0.3.5|fix_setup
-lz4-0.9.0-cp36-cp36m-linux_x86_64.whl|git|https://github.com/python-lz4/python-lz4|python-lz4|v0.9.0
-Mako-1.1.2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/50/78/f6ade1e18aebda570eed33b7c534378d9659351cadce2fcbc7b31be5f615/Mako-1.1.2-py2.py3-none-any.whl
-marathon-0.12.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/41/66/814432693297dfb076958ae5ac781e3a88fd70d335473a57f4f2c6329515/marathon-0.12.0-py2.py3-none-any.whl
-MarkupSafe-1.1.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/b9/2e/64db92e53b86efccfaea71321f597fa2e1b2bd3853d8ce658568f7a13094/MarkupSafe-1.1.1.tar.gz|MarkupSafe-1.1.1
-migrate-0.3.8-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ce/31/1a4cbf8dc0536c55f41072e8ea37b3df1e412262dc731c57e5bb099eb9b2/migrate-0.3.8.tar.gz|migrate-0.3.8
-mox-0.5.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/0c/a1/64740c638cc5fae807022368f4141700518ee343b53eb3e90bf3cc15a4d4/mox-0.5.3.tar.gz|mox-0.5.3|fix_setup
-mpmath-1.1.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ca/63/3384ebb3b51af9610086b23ea976e6d27d6d97bf140a76a365bd77a3eb32/mpmath-1.1.0.tar.gz|mpmath-1.1.0|fix_setup
-msgpack_python-0.4.8-cp36-cp36m-linux_x86_64.whl|git|https://github.com/msgpack/msgpack-python.git|msgpack-python|0.4.8
-munch-2.5.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/cc/ab/85d8da5c9a45e072301beb37ad7f833cd344e04c817d97e0cc75681d248f/munch-2.5.0-py2.py3-none-any.whl
-ndg_httpsclient-0.5.1-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/fb/67/c2f508c00ed2a6911541494504b7cac16fe0b0473912568df65fd1801132/ndg_httpsclient-0.5.1-py3-none-any.whl
-netifaces-0.10.9-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/0d/18/fd6e9c71a35b67a73160ec80a49da63d1eed2d2055054cc2995714949132/netifaces-0.10.9.tar.gz|netifaces-0.10.9
-networking_sfc-8.0.0.0b2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/6a/a8/0e9bdd1f87dfb50682f23a01f590530ec8fa715e51127cf9f58d1905886c/networking_sfc-8.0.0.0b2-py2.py3-none-any.whl
-networkx-2.2-py2.py3-none-any.whl|zip|https://files.pythonhosted.org/packages/f3/f4/7e20ef40b118478191cec0b58c3192f822cace858c19505c7670961b76b2/networkx-2.2.zip|networkx-2.2
-neutron_lib-2.3.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/83/52/805c061a96efca3c70c91d93fa8f7f555a7f86ba955ab9e4d1b41399459f/neutron_lib-2.3.0-py3-none-any.whl
-nodeenv-1.3.5-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/08/43/86ff33286c83f7b5e8903c32db01fe122c5e8a9d8dc1067dcaa9be54a033/nodeenv-1.3.5-py2.py3-none-any.whl
-nose_exclude-0.5.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/63/cf/90c4be56bf11b7bc8801086d9445baf731aa36b8e8fc5791731e8e604dcd/nose-exclude-0.5.0.tar.gz|nose-exclude-0.5.0
-nosehtmloutput-0.0.7-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/e0/5d/2bb521a8ccb0222bd94ed557645955d95ba6798df6b3b4bdc2c31dec4f7c/nosehtmloutput-0.0.7-py2.py3-none-any.whl
-openshift-0.8.6-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/73/ed/c92c0ba23b6c4c8e5542151a1b89cb8ff01f68a72fe68f6c95a28d885ebe/openshift-0.8.6.tar.gz|openshift-0.8.6
-openstack.nose_plugin-0.11-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/bc/83/e7c9b9297e1a501d2c2617f98d6176199570e8ee32f0e72669c8852c6c81/openstack.nose_plugin-0.11.tar.gz|openstack.nose_plugin-0.11
-opentracing-2.3.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e4/a8/df5285f42cd07782409d0ae835785fae6e2a0f7e8b0036ea302f1422fd25/opentracing-2.3.0.tar.gz|opentracing-2.3.0
-ovs-2.11.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/81/06/387b2159ac073de95e484aa6e2f108a232cd906e350307168843061f899f/ovs-2.11.0.tar.gz|ovs-2.11.0
-panko-5.0.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a9/89/d666e0889d869e41c9b7f87a0a34858b2520782b82e025da84c98e0db8f6/panko-5.0.0.tar.gz|panko-5.0.0
-pathlib-1.0.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ac/aa/9b065a76b9af472437a0059f77e8f962fe350438b927cb80184c32f075eb/pathlib-1.0.1.tar.gz|pathlib-1.0.1|fix_setup
-pecan-1.3.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/93/98/889d7615595e894f4f7e4c17d4008c822c8e39e650c8ab390cc6c39b99c4/pecan-1.3.3.tar.gz|pecan-1.3.3
-pifpaf-2.4.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/66/12/ed1533c0b31647ea9fb879b5ad239336ad98628227d0b90d3c7157ffb3fb/pifpaf-2.4.0-py2.py3-none-any.whl
-pika_pool-0.1.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/ec/48/50c8f02a3eef4cb824bec50661ec1713040402cc1b2a38954dc977a59c23/pika-pool-0.1.3.tar.gz|pika-pool-0.1.3
-Pint-0.9-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/15/9d/bf177ebbc57d25e9e296addc14a1303d1e34d7964af5df428a8332349c42/Pint-0.9-py2.py3-none-any.whl
-ply-3.11-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/a3/58/35da89ee790598a0700ea49b2a66594140f44dec458c07e8e3d4979137fc/ply-3.11-py2.py3-none-any.whl
-positional-1.1.2-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/8c/16/64a4fa0967c486380468dca18867d22ac1c17bba06349e31ace77c7757f7/positional-1.1.2.tar.gz|positional-1.1.2
-prettytable-0.7.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e0/a1/36203205f77ccf98f3c6cf17cf068c972e6458d7e58509ca66da949ca347/prettytable-0.7.2.tar.gz|prettytable-0.7.2
-proboscis-1.2.6.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/3c/c8/c187818ab8d0faecdc3c16c1e0b2e522f3b38570f0fb91dcae21662019d0/proboscis-1.2.6.0.tar.gz|proboscis-1.2.6.0
-psutil-5.7.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/c4/b8/3512f0e93e0db23a71d82485ba256071ebef99b227351f0f5540f744af41/psutil-5.7.0.tar.gz|psutil-5.7.0
-psycopg2-2.8.5-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/a8/8f/1c5690eebf148d1d1554fc00ccf9101e134636553dbb75bdfef4f85d7647/psycopg2-2.8.5.tar.gz|psycopg2-2.8.5
-PuLP-2.1-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/41/34/757c88c320f80ce602199603afe63aed1e0bc11180b9a9fb6018fb2ce7ef/PuLP-2.1-py3-none-any.whl
-pycparser-2.20-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/ae/e7/d9c3a176ca4b02024debf82342dab36efadfc5776f9c8db077e8f6e71821/pycparser-2.20-py2.py3-none-any.whl
-pycrypto-2.6.1-cp36-cp36m-linux_x86_64.whl|git|https://github.com/dlitz/pycrypto|pycrypto|v2.6.1|fix_setup
-pycryptodomex-3.9.7-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/7f/3c/80cfaec41c3a9d0f524fe29bca9ab22d02ac84b5bfd6e22ade97d405bdba/pycryptodomex-3.9.7.tar.gz|pycryptodomex-3.9.7
-pydot-1.4.1-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/33/d1/b1479a770f66d962f545c2101630ce1d5592d90cb4f083d38862e93d16d2/pydot-1.4.1-py2.py3-none-any.whl
-pydotplus-2.0.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/60/bf/62567830b700d9f6930e9ab6831d6ba256f7b0b730acb37278b0ccdffacf/pydotplus-2.0.2.tar.gz|pydotplus-2.0.2
-pyeclib-1.6.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/aa/d6/ca6bba5e66fc7a9810a995b17a3675492da2bec405806d8ac3db18cfd93b/pyeclib-1.6.0.tar.gz|pyeclib-1.6.0|fix_setup
-pyinotify-0.9.6-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e3/c0/fd5b18dde17c1249658521f69598f3252f11d9d7a980c5be8619970646e1/pyinotify-0.9.6.tar.gz|pyinotify-0.9.6
-pykerberos-1.2.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/9a/b8/1ec56b6fa8a2e2a81420bd3d90e70b59fc83f6b857fb2c2c37accddc8be3/pykerberos-1.2.1.tar.gz|pykerberos-1.2.1
-PyKMIP-0.10.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/f8/3e/e343bb9c2feb2a793affd052cb0da62326a021457a07d59251f771b523e7/PyKMIP-0.10.0.tar.gz|PyKMIP-0.10.0
-pylxd-2.2.10-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/49/9a/eba58646721ffbff40dc41571b13c9528fdc4e26a82252318c997cdbe26a/pylxd-2.2.10.tar.gz|pylxd-2.2.10
-pyngus-2.3.0-py3-none-any.whl|zip|https://files.pythonhosted.org/packages/58/b1/336b8f64e7e4efa9b95027af71e02cd4cfacca8f919345badb852381878a/pyngus-2.3.0.zip|pyngus-2.3.0
-pyperclip-1.8.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/f6/5b/55866e1cde0f86f5eec59dab5de8a66628cb0d53da74b8dbc15ad8dabda3/pyperclip-1.8.0.tar.gz|pyperclip-1.8.0
-pyroute2-0.5.11-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/00/5c/600b3fa746da0c857e1775b9cf0861eb8aaaec67c42352bb82f90c77e6fc/pyroute2-0.5.11.tar.gz|pyroute2-0.5.11
-pyrsistent-0.16.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/9f/0d/cbca4d0bbc5671822a59f270e4ce3f2195f8a899c97d0d5abb81b191efb5/pyrsistent-0.16.0.tar.gz|pyrsistent-0.16.0
-pyScss-1.3.7-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e6/0d/6b52a5211121b870cc0c4c908b689fd460630b01a9e501a534db78e67bad/pyScss-1.3.7.tar.gz|pyScss-1.3.7
-pysendfile-2.0.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/cd/3f/4aa268afd0252f06b3b487c296a066a01ddd4222a46b7a3748599c8fc8c3/pysendfile-2.0.1.tar.gz|pysendfile-2.0.1
-pystache-0.5.4-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/d6/fd/eb8c212053addd941cc90baac307c00ac246ac3fce7166b86434c6eae963/pystache-0.5.4.tar.gz|pystache-0.5.4
-python_barbicanclient-4.10.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/93/bf/b254f88d3c1a50212609d44ff8798e64f11df28011ead93161a2390cd4a2/python_barbicanclient-4.10.0-py3-none-any.whl
-python_cinderclient-7.0.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/64/8f/c675ad3f12d52739948b299607285a56d0a1e7d1bcc72ceed1f625a38fff/python_cinderclient-7.0.0-py3-none-any.whl
-python_consul-1.1.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/3f/d0/59bc5f1c6c4d4b498c41d8ce7052ee9e9d68be19e16038a55252018a6c4d/python_consul-1.1.0-py2.py3-none-any.whl
-python_editor-1.0.4-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/c6/d3/201fc3abe391bbae6606e6f1d598c15d367033332bd54352b12f35513717/python_editor-1.0.4-py3-none-any.whl
-python_etcd-0.4.5-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a1/da/616a4d073642da5dd432e5289b7c1cb0963cc5dde23d1ecb8d726821ab41/python-etcd-0.4.5.tar.gz|python-etcd-0.4.5
-python_json_logger-0.1.11-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/80/9d/1c3393a6067716e04e6fcef95104c8426d262b4adaf18d7aa2470eab028d/python-json-logger-0.1.11.tar.gz|python-json-logger-0.1.11
-python_ldap-3.2.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/ea/93/596f875e003c770447f4b99267820a0c769dd2dc3ae3ed19afe460fcbad0/python-ldap-3.2.0.tar.gz|python-ldap-3.2.0
-python_memcached-1.59-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/f5/90/19d3908048f70c120ec66a39e61b92c253e834e6e895cd104ce5e46cbe53/python_memcached-1.59-py2.py3-none-any.whl
-python_neutronclient-7.1.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/e2/b9/2680f60f679e3d5099274e966a68d0c45e2387aa53c8754c7f120838aeb4/python_neutronclient-7.1.0-py3-none-any.whl
-python_nss-1.0.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/6b/29/629098e34951c358b1f04f13a70b3590eb0cf2df817d945bd05c4169d71b/python-nss-1.0.1.tar.bz2|python-nss-1.0.1|fix_setup
-python_pcre-0.7-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/9d/af/61435bd163f01fe3709fca9b1f79e4978d8089ee671d2e004fc85e10de29/python-pcre-0.7.tar.gz|python-pcre-0.7|fix_setup
-python_pytun-2.3.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/52/a4/a062106c739eac79c8160fcf5779ebc84afc1c38b016ab216ed1e6da69b6/python-pytun-2.3.0.tar.gz|python-pytun-2.3.0|fix_setup
-python_string_utils-0.6.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/5d/13/216f2d4a71307f5a4e5782f1f59e6e8e5d6d6c00eaadf9f92aeccfbb900c/python-string-utils-0.6.0.tar.gz|python-string-utils-0.6.0|fix_setup
-pyudev-0.22.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/72/c8/4660d815a79b1d42c409012aaa10ebd6b07a47529b4cb6880f27a24bd646/pyudev-0.22.0.tar.gz|pyudev-0.22.0
-PyYAML-5.3.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/64/c2/b80047c7ac2478f9501676c988a5411ed5572f35d1beff9cae07d321512c/PyYAML-5.3.1.tar.gz|PyYAML-5.3.1
-pyzabbix-0.7.5-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/11/ad/24e19d0cf16d05b7ee19f337f02058ee9b760649171865469ccceef83027/pyzabbix-0.7.5.tar.gz|pyzabbix-0.7.5
-rcssmin-1.0.6-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e2/5f/852be8aa80d1c24de9b030cdb6532bc7e7a1c8461554f6edbe14335ba890/rcssmin-1.0.6.tar.gz|rcssmin-1.0.6|fix_setup
-repoze.lru-0.7-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/12/bc/595a77c4b5e204847fdf19268314ef59c85193a9dc9f83630fc459c0fee5/repoze.lru-0.7.tar.gz|repoze.lru-0.7
-requests_aws-0.1.8-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/5e/2f/4da17752036c04cf4c9af7a2da0d41ef2205043f1c61008006475aa24b8b/requests-aws-0.1.8.tar.gz|requests-aws-0.1.8
-restructuredtext_lint-1.3.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/62/76/bd8760de759fb74d7863e6935200af101cb128a7de008741a4e22341d03c/restructuredtext_lint-1.3.0.tar.gz|restructuredtext_lint-1.3.0
-retrying-1.3.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/44/ef/beae4b4ef80902f22e3af073397f079c96969c69b2c7d52a57ea9ae61c9d/retrying-1.3.3.tar.gz|retrying-1.3.3
-rfc3986-1.4.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/78/be/7b8b99fd74ff5684225f50dd0e865393d2265656ef3b4ba9eaaaffe622b8/rfc3986-1.4.0-py2.py3-none-any.whl
-rjsmin-1.1.0-cp36-cp36m-manylinux1_x86_64.whl|pypi|https://files.pythonhosted.org/packages/62/ee/574b170bbe7a059314e7239305cb829379232a408901585019e012e71170/rjsmin-1.1.0-cp36-cp36m-manylinux1_x86_64.whl
-rtslib_fb-2.1.71-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/9e/1b/c26bc038888b1e6042d35ec97599cef05181fb6a7a7ecdbb0c041c3f50ea/rtslib-fb-2.1.71.tar.gz|rtslib-fb-2.1.71|
-scandir-1.10.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/df/f5/9c052db7bd54d0cbf1bc0bb6554362bba1012d03e5888950a4f5c5dadc4e/scandir-1.10.0.tar.gz|scandir-1.10.0
-scrypt-0.8.13-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/80/3d/141eb80e754b86f6c25a2ffaf6c3af3acdb65a3e3700829a05ab0c5d965d/scrypt-0.8.13.tar.gz|scrypt-0.8.13
-SecretStorage-2.3.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/a5/a5/0830cfe34a4cfd0d1c3c8b614ede1edb2aaf999091ac8548dd19cb352e79/SecretStorage-2.3.1.tar.gz|SecretStorage-2.3.1
-setproctitle-1.1.10-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/5a/0d/dc0d2234aacba6cf1a729964383e3452c52096dc695581248b548786f2b3/setproctitle-1.1.10.tar.gz|setproctitle-1.1.10
-simplegeneric-0.8.1-py3-none-any.whl|zip|https://files.pythonhosted.org/packages/3d/57/4d9c9e3ae9a255cd4e1106bb57e24056d3d0709fc01b2e3e345898e49d5b/simplegeneric-0.8.1.zip|simplegeneric-0.8.1
-simplejson-3.17.0-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/98/87/a7b98aa9256c8843f92878966dc3d8d914c14aad97e2c5ce4798d5743e07/simplejson-3.17.0.tar.gz|simplejson-3.17.0
-skydive_client-0.7.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/98/86/62925511c6282add4e339639fc5a9e22fd0dc95783b7627fd56bf45a32bf/skydive_client-0.7.0-py3-none-any.whl
-smmap-3.0.4-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/b0/9a/4d409a6234eb940e6a78dfdfc66156e7522262f5f2fecca07dc55915952d/smmap-3.0.4-py2.py3-none-any.whl
-sphinxcontrib_fulltoc-1.2.0-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/8e/a6/d1297db9b75650681e5429e92e13df139ee6b64303ff1b2eea4ebd32c0a9/sphinxcontrib-fulltoc-1.2.0.tar.gz|sphinxcontrib-fulltoc-1.2.0
-sphinxcontrib_pecanwsme-0.10.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/75/2b/105d07f47485ecf774cd80b881c29e148182b72a3a60596abdd016c87fce/sphinxcontrib-pecanwsme-0.10.0.tar.gz|sphinxcontrib-pecanwsme-0.10.0
-SQLAlchemy-1.3.16-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/7f/4b/adfb1f03da7f50db054a5b728d32dbfae8937754cfa159efa0216a3758d1/SQLAlchemy-1.3.16.tar.gz|SQLAlchemy-1.3.16
-SQLAlchemy_Utils-0.36.3-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/aa/24/68937e9b5c757f62795467e2f02a8f463a3a1fd3d08bd32a6b0583ba3dbf/SQLAlchemy-Utils-0.36.3.tar.gz|SQLAlchemy-Utils-0.36.3
-stomp.py-6.0.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/d7/a0/a67e46ec1e63f2e78497e7331092eeb2ce4b69738d80a8210122e7a000a9/stomp.py-6.0.0-py3-none-any.whl
-subprocess32-3.5.4-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/32/c8/564be4d12629b912ea431f1a50eb8b3b9d00f1a0b1ceff17f266be190007/subprocess32-3.5.4.tar.gz|subprocess32-3.5.4
-suds_jurko-0.6-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/bd/6f/54fbf0999a606680d27c69b1ad12dfff62768ecb9fe48524cebda6eb4423/suds-jurko-0.6.tar.bz2|suds-jurko-0.6
-systemd_python-234-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/e8/a8/00ba0f605837a8f69523e6c3a4fb14675a6430c163f836540129c50b3aef/systemd-python-234.tar.gz|systemd-python-234|fix_setup
-sysv_ipc-1.0.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/57/8a/9bbb064566320cd66c6e32c35db76d43932d7b94348f0c4c1e74d03ec261/sysv_ipc-1.0.1.tar.gz|sysv_ipc-1.0.1|fix_setup
-tabulate-0.8.7-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/c4/f4/770ae9385990f5a19a91431163d262182d3203662ea2b5739d0fcfc080f1/tabulate-0.8.7-py3-none-any.whl
-tempest-24.0.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/f0/eb/d3fb2cdb72c20caa7a4e0af2c60176ce82e120e99ce7e5a62a386faae89c/tempest-24.0.0-py3-none-any.whl
-Tempita-0.5.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/56/c8/8ed6eee83dbddf7b0fc64dd5d4454bc05e6ccaafff47991f73f2894d9ff4/Tempita-0.5.2.tar.gz|Tempita-0.5.2
-termcolor-1.1.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/8a/48/a76be51647d0eb9f10e2a4511bf3ffb8cc1e6b14e9e4fab46173aa79f981/termcolor-1.1.0.tar.gz|termcolor-1.1.0|fix_setup
-testrepository-0.0.20-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/0c/85/f495b58b2b0ac907def07385219e9747b75840fa01280f228546a4a5ad7f/testrepository-0.0.20.tar.gz|testrepository-0.0.20
-thrift-0.13.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/97/1e/3284d19d7be99305eda145b8aa46b0c33244e4a496ec66440dac19f8274d/thrift-0.13.0.tar.gz|thrift-0.13.0
-thriftpy-0.3.9-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/f4/19/cca118cf7d2087310dbc8bd70dc7df0c1320f2652873a93d06d7ba356d4a/thriftpy-0.3.9.tar.gz|thriftpy-0.3.9
-thriftpy2-0.4.11-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/a9/f0/9bf08e6b5983aa6a6103818da21eadfaea1ad99ec9882be3e75a30e8e9ff/thriftpy2-0.4.11.tar.gz|thriftpy2-0.4.11
-tinyrpc-1.0.4-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/9d/91/c639ba014aada92446516c5fc4b04f2cee3539ab2d0758a6a87a6da973cb/tinyrpc-1.0.4.tar.gz|tinyrpc-1.0.4
-tornado-6.0.4-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/95/84/119a46d494f008969bf0c775cb2c6b3579d3c4cc1bb1b41a022aa93ee242/tornado-6.0.4.tar.gz|tornado-6.0.4
-trollius-2.2.post1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/0b/31/356ae13ad4df58f963e9954d55118f6cffdb3a903c1547973ad7bc347fb9/trollius-2.2.post1.tar.gz|trollius-2.2.post1
-ujson-2.0.3-cp36-cp36m-manylinux1_x86_64.whl|pypi|https://files.pythonhosted.org/packages/a8/e4/a79c57e22d6d09bbeb5e8febb8cfa0fe10ede69eed9c3458d3ec99014e20/ujson-2.0.3-cp36-cp36m-manylinux1_x86_64.whl
-unicodecsv-0.14.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/6f/a4/691ab63b17505a26096608cc309960b5a6bdf39e4ba1a793d5f9b1a53270/unicodecsv-0.14.1.tar.gz|unicodecsv-0.14.1
-uWSGI-2.0.17.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/a2/c9/a2d5737f63cd9df4317a4acc15d1ddf4952e28398601d8d7d706c16381e0/uwsgi-2.0.17.1.tar.gz|uwsgi-2.0.17.1
-voluptuous-0.11.7-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/24/3b/fe531688c0d9e057fccc0bc9430c0a3d4b90e0d2f015326e659c2944e328/voluptuous-0.11.7.tar.gz|voluptuous-0.11.7
-warlock-1.3.3-py2.py3-none-any.whl|tar|https://files.pythonhosted.org/packages/c2/36/178b26a338cd6d30523246da4721b1114306f588deb813f3f503052825ee/warlock-1.3.3.tar.gz|warlock-1.3.3
-weakrefmethod-1.0.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/99/82/73a21e3eab9a1ff76d12375f7301fba5c6325b9598eed0ae5b0cf5243656/weakrefmethod-1.0.3.tar.gz|weakrefmethod-1.0.3
-websockify-0.9.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/c4/5b/16ec1e9f4fc536846d95a01a77d97da12f8042ca5cf83cdf3dd0442e881c/websockify-0.9.0.tar.gz|websockify-0.9.0
-whereto-0.4.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/80/83/371a699ce90257608592dadca400a7ecd9a2db6137d78f6f433c7c5e3197/whereto-0.4.0.tar.gz|whereto-0.4.0
-wrapt-1.12.1-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/82/f7/e43cefbe88c5fd371f4cf0cf5eb3feccd07515af9fd6cf7dbf1d1793a797/wrapt-1.12.1.tar.gz|wrapt-1.12.1|fix_setup
-ws4py-0.5.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/53/20/4019a739b2eefe9282d3822ef6a225250af964b117356971bd55e274193c/ws4py-0.5.1.tar.gz|ws4py-0.5.1
-WSME-0.10.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/e6/79/8aca55e7f3f21549dba59c276fc990b8d9bbde071fb17e1a968254d1df36/WSME-0.10.0-py3-none-any.whl
-xattr-0.9.7-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/c1/74/1ff659d6deb1d2d6babb9483171edfa330264ae2cbf005035bb7a77b07d2/xattr-0.9.7.tar.gz|xattr-0.9.7
-XStatic-1.0.2-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/36/78/c0ffaf14216517a14d3daa67ff24fbb60b4703e95ce1059a48fd508e6b8c/XStatic-1.0.2.tar.gz|XStatic-1.0.2
-XStatic_Angular_FileUpload-12.0.4.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/4d/fd/c3051915d2f12e8fa11f59c01162ce85e38eca15d9ec73a3d7b271b49744/XStatic-Angular-FileUpload-12.0.4.0.tar.gz|XStatic-Angular-FileUpload-12.0.4.0
-XStatic_Angular_lrdragndrop-1.0.2.4-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/80/ea/ffdde05892eabe468f22403f75299cf5d991f0af4f1400bebbf3af04bc9a/XStatic_Angular_lrdragndrop-1.0.2.4-py2.py3-none-any.whl
-XStatic_Angular_Schema_Form-0.8.13.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/57/71/ceea2c0a72e2ee2d316d6ab1c06b21faa9f5cbc4b36a4127d7847b7079c5/XStatic-Angular-Schema-Form-0.8.13.0.tar.gz|XStatic-Angular-Schema-Form-0.8.13.0
-XStatic_Bootstrap_Datepicker-1.4.0.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/3e/ab/806279e234318feb71c392b51d3a5c537c96e123b8e53c7bdeadf987b174/XStatic_Bootstrap_Datepicker-1.4.0.0-py3-none-any.whl
-XStatic_Hogan-2.0.0.3-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/6d/a3/822ce8570757a5b258c39f71f357b2276365f0e6d91094e37d706da5bee4/XStatic_Hogan-2.0.0.3-py3-none-any.whl
-XStatic_Jasmine-2.4.1.2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/05/43/ceac7def3b6eaf82b6f593e3db2b03a9693a7b002b569e664e382aecddbc/XStatic_Jasmine-2.4.1.2-py2.py3-none-any.whl
-XStatic_jQuery-1.12.4.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/67/f1/c18c14fc4aab386e4aba587c5d10c268de222c75bf5e271b6f68a2ea6e77/XStatic-jQuery-1.12.4.1.tar.gz|XStatic-jQuery-1.12.4.1
-XStatic_JQuery_Migrate-1.2.1.2-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/07/25/a1b3d6ecec8a889132951935cd1daec7b3a3f91bf08bdfb670b7ee5c3785/XStatic_JQuery_Migrate-1.2.1.2-py3-none-any.whl
-XStatic_JQuery.quicksearch-2.0.3.2-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/41/cf/24665d03c2c5963f0ad476b2af16a59af377735ab89d48d97e178409faf5/XStatic_JQuery.quicksearch-2.0.3.2-py3-none-any.whl
-XStatic_JQuery.TableSorter-2.14.5.2-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/38/af/f36c9ef0c5c1e12caca2d9f126573cdd7b97bc8d922fabe903964d078181/XStatic_JQuery.TableSorter-2.14.5.2-py3-none-any.whl
-XStatic_jquery_ui-1.12.1.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/e6/5a/883b22dad1d3e01708312d71c5bc63d543d66cef9b448c1cf85379d64fb3/XStatic-jquery-ui-1.12.1.1.tar.gz|XStatic-jquery-ui-1.12.1.1
-XStatic_mdi-1.6.50.2-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/73/49/13b9f7ce9fbcc7fabe086b7ac1b056118cbd4c9abf185e01cc4a54631136/XStatic_mdi-1.6.50.2-py2.py3-none-any.whl
-XStatic_objectpath-1.2.1.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/23/6c/56de25d9d3be430e7de2fcf4baac10279dad78d7b16cbda339cf014c2fe5/XStatic-objectpath-1.2.1.0.tar.gz|XStatic-objectpath-1.2.1.0
-XStatic_Rickshaw-1.5.1.0-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/23/cc/20380c36f60a424e655c005ce8be9329cbf41c58c5aa3db773485d1d0dcd/XStatic_Rickshaw-1.5.1.0-py3-none-any.whl
-XStatic_Spin-1.2.5.3-py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/ba/27/c678a4ca0e0a14f5a9edf4c97a89a6c493446b1a00aee78ea03e79333097/XStatic_Spin-1.2.5.3-py3-none-any.whl
-XStatic_term.js-0.0.7.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/63/7a/7bfec29f5f28fdda7170ebbbb2204aeb1d33d6050f3476a807590de06434/XStatic-term.js-0.0.7.0.tar.gz|XStatic-term.js-0.0.7.0
-XStatic_tv4-1.2.7.0-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/2b/26/b07115af27b339c861b8c9a775a621524b421c898e26e015880dfb888c49/XStatic-tv4-1.2.7.0.tar.gz|XStatic-tv4-1.2.7.0
-XStatic_Font_Awesome-4.7.0.0-py2.py3-none-any.whl|pypi|https://files.pythonhosted.org/packages/b4/ca/24685f91f744cde936294c033685cb4bb3302430f005cc834d86d75b9640/XStatic_Font_Awesome-4.7.0.0-py2.py3-none-any.whl
-xvfbwrapper-0.2.9-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/57/b6/4920eabda9b49630dea58745e79f9919aba6408d460afe758bf6e9b21a04/xvfbwrapper-0.2.9.tar.gz|xvfbwrapper-0.2.9
-yappi-1.2.3-cp36-cp36m-linux_x86_64.whl|tar|https://files.pythonhosted.org/packages/37/dc/86bbe1822cdc6dbf46c644061bd24217f6a0f056f00162a3697c9bea7575/yappi-1.2.3.tar.gz|yappi-1.2.3
-yaql-1.1.3-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/77/89/cfee017cf4f2d6f5e7159bbf13fe4131c7dbf20d675b78c9928ae9aa9df8/yaql-1.1.3.tar.gz|yaql-1.1.3
-zVMCloudConnector-1.4.1-py3-none-any.whl|tar|https://files.pythonhosted.org/packages/11/92/9f704de9759816e7b9897b9fb41285b421498b4642551b6fbcccd2850008/zVMCloudConnector-1.4.1.tar.gz|zVMCloudConnector-1.4.1
diff --git a/build-tools/build-wheels/doc/wheels-cfg.md b/build-tools/build-wheels/doc/wheels-cfg.md
index f7fa5d2d..77499211 100644
--- a/build-tools/build-wheels/doc/wheels-cfg.md
+++ b/build-tools/build-wheels/doc/wheels-cfg.md
@@ -1,6 +1,6 @@
 # Syntax of wheels config files
 
-The files {debian,centos}/{stable,dev}-wheels.cfg list the 3rd-party wheels
+The files {debian}/{stable,dev}-wheels.cfg list the 3rd-party wheels
 (ie compiled python modules) to be included in the wheels tarball. Wheels are
 listed one per line, each with the following "|"-separated fields.
 
diff --git a/build-tools/build-wheels/get-stx-wheels.sh b/build-tools/build-wheels/get-stx-wheels.sh
index 3f55f34a..972e5d1c 100755
--- a/build-tools/build-wheels/get-stx-wheels.sh
+++ b/build-tools/build-wheels/get-stx-wheels.sh
@@ -14,7 +14,7 @@ if [ -z "${MY_WORKSPACE}" -o -z "${MY_REPO}" ]; then
     exit 1
 fi
 
-SUPPORTED_OS_ARGS=('centos' 'debian')
+SUPPORTED_OS_ARGS=('debian')
 OS=
 BUILD_STREAM=stable
 
@@ -24,7 +24,7 @@ Usage:
 $(basename $0) [ --os <os> ] [ --stream <stable|dev> ]
 
 Options:
-    --os:         Specify base OS (eg. centos)
+    --os:         Specify base OS (eg. debian)
     --stream:     Openstack release (default: stable)
 
 EOF
@@ -92,47 +92,23 @@ fi
 
 source ${MY_REPO}/build-tools/git-utils.sh
 
-# For backward compatibility.  Old repo location or new?
-if [ "${OS}" = "centos" ]; then
-    CENTOS_REPO=${MY_REPO}/centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-        if [ ! -d ${CENTOS_REPO} ]; then
-            echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-            exit 1
-        fi
-    fi
-fi
-
 function get_wheels_files {
     find ${GIT_LIST} -maxdepth 1 -name "${OS}_${BUILD_STREAM}_wheels.inc"
 }
 
 function get_lower_layer_wheels_files {
     # FIXME: debian: these are in repomgr pod, can't get to them easily
-    if [[ "${OS}" != "centos" ]] ; then
+    if [[ "${OS}" == "debian" ]] ; then
         echo "$OS: lower layer wheels not supported!" >&2
         return 1
     fi
-    find ${CENTOS_REPO}/layer_wheels_inc -maxdepth 1 -name "*_${OS}_${BUILD_STREAM}_wheels.inc"
-}
-
-function find_wheel_rpm {
-    local wheel="$1"
-    local repo=
-
-    for repo in ${MY_WORKSPACE}/std/rpmbuild/RPMS \
-                ${CENTOS_REPO}/Binary; do
-        if [ -d $repo ]; then
-            find $repo -name "${wheel}-[^-]*-[^-]*[.][^.]*[.]rpm"
-        fi
-    done | head -n 1
+    # find ${DEBIAN_REPO}/layer_wheels_inc -maxdepth 1 -name "*_${OS}_${BUILD_STREAM}_wheels.inc"
 }
 
 function find_wheel_deb {
     local wheel="$1"
     local repo=
-    # FIXME: debian: we should also scan non-stx RPMs, but they are in repomgr
+    # FIXME: debian: we should also scan non-stx packages, but they are in repomgr
     #        pod and we can't easily get to them.
     for repo in ${MY_WORKSPACE}/std ; do
         if [ -d $repo ]; then
@@ -160,26 +136,6 @@ cd ${BUILD_OUTPUT_PATH}
 declare -a FAILED
 for wheel in $(sed -e 's/#.*//' ${WHEELS_FILES[@]} | sort -u); do
     case $OS in
-        centos)
-            # Bash globbing does not handle [^\-] well,
-            # so use grep instead
-            wheelfile="$(find_wheel_rpm ${wheel})"
-
-            if [ ! -e "${wheelfile}" ]; then
-                echo "Could not find ${wheel}" >&2
-                FAILED+=($wheel)
-                continue
-            fi
-
-            echo Extracting ${wheelfile}
-
-            rpm2cpio ${wheelfile} | cpio -vidu
-            if [ ${PIPESTATUS[0]} -ne 0 -o ${PIPESTATUS[1]} -ne 0 ]; then
-                echo "Failed to extract content of ${wheelfile}" >&2
-                FAILED+=($wheel)
-            fi
-
-            ;;
         debian)
             wheelfile="$(find_wheel_deb ${wheel})"
             if [ ! -e "${wheelfile}" ]; then
diff --git a/build-tools/build_guest/build-guest-image.py b/build-tools/build_guest/build-guest-image.py
deleted file mode 100755
index ec26319a..00000000
--- a/build-tools/build_guest/build-guest-image.py
+++ /dev/null
@@ -1,123 +0,0 @@
-#!/usr/bin/env python
-
-#
-# Build a bootable guest image from the supplied rootfs archive
-#
-
-import getopt
-import guestfs
-import os
-import sys
-
-
-MBR_FILE='/usr/share/syslinux/mbr.bin'
-MBR_SIZE=440
-
-def build_image(inputfile, outputfile, extrasize, trace):
-    g = guestfs.GuestFS(python_return_dict=True)
-
-    # Set the trace flag so that we can see each libguestfs call.
-    if trace:
-        g.set_trace(1)
-
-    # Create a raw-format sparse disk image with padding of size
-    inputsize = os.path.getsize(inputfile)
-    g.disk_create(outputfile, "raw", inputsize + extrasize)
-
-    # Attach the new disk image to libguestfs.
-    g.add_drive_opts(outputfile, format="raw", readonly=0)
-
-    # Run the libguestfs back-end.
-    g.launch()
-
-    # Get the list of devices.  Because we only added one drive
-    # above, we expect that this list should contain a single
-    # element.
-    devices = g.list_devices()
-    assert(len(devices) == 1)
-
-    # Partition the disk as one single MBR partition.
-    g.part_disk(devices[0], "mbr")
-
-    # Get the list of partitions.  We expect a single element, which
-    # is the partition we have just created.
-    partitions = g.list_partitions()
-    assert(len(partitions) == 1)
-
-    # Create a filesystem on the partition.
-    # NOTE: extlinux does not support 64-bit file systems
-    g.mkfs("ext4", partitions[0], features="^64bit")
-
-    # Now mount the filesystem so that we can add files.
-    g.mount(partitions[0], "/")
-
-    # Upload file system files and directories.
-    g.tar_in(inputfile, "/")
-
-    # Install the boot loader
-    g.extlinux("/boot")
-
-    # Unmount the file systems.
-    g.umount_all();
-
-    # Write the master boot record.
-    with open(MBR_FILE, mode='rb') as mbr:
-        mbr_data = mbr.read()
-        assert(len(mbr_data) == MBR_SIZE)
-        g.pwrite_device(devices[0], mbr_data, 0)
-
-    # Mark the device as bootable.
-    g.part_set_bootable(devices[0], 1, 1)
-    
-    # Label the boot disk for root identification
-    g.set_label(partitions[0], "wrs_guest")
-
-    # Shutdown and close guest image
-    g.shutdown()
-    g.close()
-
-
-def exit_usage(result=0):
-    print('USAGE: -i <input-file> -o <output-file> [-s <extra-bytes>]')
-    sys.exit(result)
-
-
-def main(argv):
-    inputfile = None
-    outputfile = None
-    extrasize = None
-    trace = False
-
-    try:
-        opts, args = getopt.getopt(argv,"hxi:o:s:",
-                                   ["input=", "output=", "size="])
-    except getopt.GetoptError:
-        exit_usage(2)
-    for opt, arg in opts:
-        if opt == '-h':
-            exit_usage()
-        if opt == '-x':
-            trace = True
-        elif opt in ("-i", "--input"):
-            inputfile = arg
-        elif opt in ("-o", "--output"):
-            outputfile = arg
-        elif opt in ("-s", "--size"):
-            extrasize = int(arg)
-
-    if not inputfile:
-        print(stderr, "ERROR: missing input file")
-        exit_usage(-1)
-
-    if not outputfile:
-        print(stderr, "ERROR: missing output file")
-        exit_usage(-1)
-
-    if not extrasize:
-        extrasize = 0
-
-    build_image(inputfile, outputfile, extrasize, trace)
-
-
-if __name__ == "__main__":
-    main(sys.argv[1:])
diff --git a/build-tools/build_guest/image-rt.inc b/build-tools/build_guest/image-rt.inc
deleted file mode 100644
index 4526d49e..00000000
--- a/build-tools/build_guest/image-rt.inc
+++ /dev/null
@@ -1,14 +0,0 @@
-# List of packages to be included/installed in RT guest image
-# If these have dependencies, they will be pulled in automatically
-#
-
-# This will help us have our automation debug TC failures when pings to VMs fail. 
-qemu-guest-agent
-
-# Add debugging tools
-zip
-unzip
-traceroute
-
-# Add cfn-push-stats for heat demos
-heat-cfntools
diff --git a/build-tools/build_guest/image.inc b/build-tools/build_guest/image.inc
deleted file mode 100644
index dc3c9504..00000000
--- a/build-tools/build_guest/image.inc
+++ /dev/null
@@ -1,14 +0,0 @@
-# List of packages to be included/installed in guest image
-# If these have dependencies, they will be pulled in automatically
-#
-
-# This will help us have our automation debug TC failures when pings to VMs fail. 
-qemu-guest-agent
-
-# Add debugging tools
-zip
-unzip
-traceroute
-
-# Add cfn-push-stats for heat demos
-heat-cfntools
diff --git a/build-tools/build_guest/rootfs-exclude.txt b/build-tools/build_guest/rootfs-exclude.txt
deleted file mode 100644
index 273a301f..00000000
--- a/build-tools/build_guest/rootfs-exclude.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-# exclude special filesystems
-/builddir
-/dev/*
-/proc/*
-/tmp/*
-/sys/*
-/root/rootfs.tar
-
-# exclude local repo yum configuration
-/etc/yum/yum.conf
-
-# omit platform hooks to check install uuid
-/etc/dhcp/dhclient-enter-hooks
diff --git a/build-tools/build_guest/rootfs-rt/boot/extlinux.conf b/build-tools/build_guest/rootfs-rt/boot/extlinux.conf
deleted file mode 100644
index d57fd306..00000000
--- a/build-tools/build_guest/rootfs-rt/boot/extlinux.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-SERIAL 0 115200
-
-DEFAULT linux
-LABEL linux
-  KERNEL vmlinuz
-  INITRD initramfs.img
-  APPEND rw root=LABEL=wrs_guest clocksource=pit console=tty0 console=ttyS0 biosdevname=0 net.ifnames=0 no_timer_check audit=0 cgroup_disable=memory isolcpus=1-3 irqaffinity=0  nmi_watchdog=0 softlockup_panic=0 intel_idle.max_cstate=0 processor.max_cstate=1 idle=poll
diff --git a/build-tools/build_guest/rootfs-setup.sh b/build-tools/build_guest/rootfs-setup.sh
deleted file mode 100755
index 40a51f62..00000000
--- a/build-tools/build_guest/rootfs-setup.sh
+++ /dev/null
@@ -1,92 +0,0 @@
-#!/bin/bash
-
-BUILD_MODE=''
-if [ "$1" == "--rt" ]; then
-    BUILD_MODE="rt"
-fi
-if [ "$1" == "--std" ]; then
-    BUILD_MODE="std"
-fi
-
-# Setup boot directory for syslinux configuration (/boot/extlinux.conf)
-ln -s $(ls /boot/vmlinuz-*.x86_64 | head -1) /boot/vmlinuz
-ln -s $(ls /boot/initramfs-*.x86_64.img | head -1) /boot/initramfs.img
-
-# Setup root and sysadmin users
-usermod -p $(openssl passwd -1 root) root
-useradd -p $(openssl passwd -1 sysadmin) sysadmin
-
-# Enable SUDO access for sysadmin
-echo "sysadmin ALL=(ALL) NOPASSWD: ALL" >> /etc/sudoers
-
-# Enable remote root login to permit automated tools to run privileged commands
-sed -i 's%^#\(PermitRootLogin \)%\1%' /etc/ssh/sshd_config
-sed -i 's#^\(PermitRootLogin \).*#\1yes#' /etc/ssh/sshd_config
-
-# Enable password login to permit automated tools to run commands
-sed -i 's%^#\(PasswordAuthentication \)%\1%' /etc/ssh/sshd_config
-sed -i 's#^\(PasswordAuthentication \).*#\1yes#' /etc/ssh/sshd_config
-
-# Disable PAM authentication
-sed -i 's#^\(UsePAM \).*#\1no#' /etc/ssh/sshd_config
-
-# Prevent cloud_init for reverting our changes
-sed -i 's#^\(ssh_pwauth:\).*#\1 1#' /etc/cloud/cloud.cfg
-sed -i 's#^\(disable_root:\).*#\1 0#' /etc/cloud/cloud.cfg
-
-# Setup SSHD to mark packets for QoS processing in the host (this seems to
-# be broken in our version of SSHd so equivalent iptables rules are being
-# added to compensate.
-echo "IPQoS cs7" >> /etc/ssh/sshd_config
-
-# Disable reverse path filtering to permit traffic testing from
-# foreign routes.
-sed -i 's#^\(net.ipv4.conf.*.rp_filter=\).*#\10#' /etc/sysctl.conf
-
-# Change /etc/rc.local to touch a file to indicate that the init has
-# completed.  This is required by the AVS vbenchmark tool so that it knows
-# that the VM is ready to run.  This was added because VM instances take a
-# long time (2-3 minutes) to resize their filesystem when run on a system with
-# HDD instead of SSD.
-chmod +x /etc/rc.d/rc.local
-echo "touch /var/run/.init-complete" >> /etc/rc.local
-
-if [ "$BUILD_MODE" == "rt" ]; then
-   # Adjust system tuning knobs during init when using rt kernel (CGTS-7047)
-    echo "echo 1 > /sys/devices/virtual/workqueue/cpumask" >> /etc/rc.local
-    echo "echo 1 > /sys/bus/workqueue/devices/writeback/cpumask" >> /etc/rc.local
-    echo "echo -1 > /proc/sys/kernel/sched_rt_runtime_us" >> /etc/rc.local
-    echo "echo 0 > /proc/sys/kernel/timer_migration" >> /etc/rc.local
-    echo "echo 10 > /proc/sys/vm/stat_interval" >> /etc/rc.local
-fi
-
-# Disable audit service by default
-# With this enabled, it causes system delays when running at maximum
-# capacity that impacts the traffic processing enough to cause unclean
-# traffic runs when doing benchmark tests.
-systemctl disable auditd
-
-if [ "$BUILD_MODE" == "rt" ]; then
-   # Additional services to disable on rt guest (CGTS-7047)
-    systemctl disable polkit.service
-    systemctl disable tuned.service
-fi
-
-# Clean the yum cache.  We don't want to maintain it on the guest file system.
-yum clean all
-
-# update /etc/rsyslog.conf to have OmitLocalLogging off
-if [ -f /etc/rsyslog.conf ]; then
-    sed -i 's#OmitLocalLogging on#OmitLocalLogging off#g' /etc/rsyslog.conf
-fi
-
-# select correct kernel and initrd
-if [ "$BUILD_MODE" == "rt" ]; then
-    PATTERN=$(rpm -q --qf '%{VERSION}-%{RELEASE}' kernel-rt)
-else
-    PATTERN=$(rpm -q --qf '%{VERSION}-%{RELEASE}' kernel)
-fi
-cd /boot
-rm -f vmlinuz initramfs.img
-ln -s $(ls -1 vmlinuz-$PATTERN*) vmlinuz
-ln -s $(ls -1 initramfs-$PATTERN*img) initramfs.img
diff --git a/build-tools/build_guest/rootfs-std/boot/extlinux.conf b/build-tools/build_guest/rootfs-std/boot/extlinux.conf
deleted file mode 100644
index fff8aadd..00000000
--- a/build-tools/build_guest/rootfs-std/boot/extlinux.conf
+++ /dev/null
@@ -1,7 +0,0 @@
-SERIAL 0 115200
-
-DEFAULT linux
-LABEL linux
-  KERNEL vmlinuz
-  INITRD initramfs.img
-  APPEND rw root=LABEL=wrs_guest clocksource=pit console=tty0 console=ttyS0 biosdevname=0 net.ifnames=0 no_timer_check
diff --git a/build-tools/build_guest/rootfs/etc/cloud/cloud.cfg.d/99_wrs-datasources.cfg b/build-tools/build_guest/rootfs/etc/cloud/cloud.cfg.d/99_wrs-datasources.cfg
deleted file mode 100644
index 0fc57890..00000000
--- a/build-tools/build_guest/rootfs/etc/cloud/cloud.cfg.d/99_wrs-datasources.cfg
+++ /dev/null
@@ -1,18 +0,0 @@
-# Override the datasource list to use only those that are expected (and needed)
-# to work in our lab environment.
-#
-datasource_list:
- - NoCloud
- - ConfigDrive
- - Ec2
- - None
-
-# Adjust the Ec2 max_wait to be 30 seconds instead of the default 120 seconds,
-# and set the list of URLs to be the only one that we expect to work in our lab
-# environment so that we avoid DNS lookup failures for alternate choices.
-#
-datasource:
-  Ec2:
-    timeout: 10
-    max_wait: 30
-    metadata_urls: ['http://169.254.169.254']
diff --git a/build-tools/build_guest/rootfs/etc/dhcp/dhclient.conf b/build-tools/build_guest/rootfs/etc/dhcp/dhclient.conf
deleted file mode 100644
index 356713a5..00000000
--- a/build-tools/build_guest/rootfs/etc/dhcp/dhclient.conf
+++ /dev/null
@@ -1,21 +0,0 @@
-## Use a CID based on the hardware address for both IPv4 and IPv6.  This mostly
-## useful for IPv6 to ensure that the client is not using a random DUID for the
-## CID on each reboot.
-send dhcp6.client-id = concat(00:03:00, hardware);
-send dhcp-client-identifier = concat(00:03:00, hardware);
-
-## Defaults for all interfaces
-request interface-mtu, subnet-mask, broadcast-address, time-offset,
-    classless-static-routes;
-
-interface "eth0" {
-    ## Override for eth0 to add requests for attributes that we only care to 
-    ## configure for our primary network interface
-    request interface-mtu, subnet-mask, broadcast-address, time-offset,
-        domain-name, domain-name-servers, host-name,
-        classless-static-routes, routers;
-}
-
-timeout 15;
-
-retry 5;
diff --git a/build-tools/build_guest/rootfs/etc/iptables.rules b/build-tools/build_guest/rootfs/etc/iptables.rules
deleted file mode 100644
index 293aee95..00000000
--- a/build-tools/build_guest/rootfs/etc/iptables.rules
+++ /dev/null
@@ -1,12 +0,0 @@
-*mangle
-:PREROUTING ACCEPT [0:0]
-:INPUT ACCEPT [0:0]
-:FORWARD ACCEPT [0:0]
-:OUTPUT ACCEPT [0:0]
-:POSTROUTING ACCEPT [0:0]
--A OUTPUT -o eth0 -p tcp --sport 22 -j DSCP --set-dscp-class CS7
--A OUTPUT -o eth0 -p tcp --dport 22 -j DSCP --set-dscp-class CS7
--A OUTPUT -o eth0 -p udp --sport 67:68 -j DSCP --set-dscp-class CS7
--A OUTPUT -o eth0 -p udp --dport 67:68 -j DSCP --set-dscp-class CS7
--A OUTPUT -o eth0 -d 169.254.169.254 -j DSCP --set-dscp-class CS7
-COMMIT
diff --git a/build-tools/build_guest/rootfs/etc/modprobe.d/floppy.conf b/build-tools/build_guest/rootfs/etc/modprobe.d/floppy.conf
deleted file mode 100644
index 81e9704e..00000000
--- a/build-tools/build_guest/rootfs/etc/modprobe.d/floppy.conf
+++ /dev/null
@@ -1 +0,0 @@
-blacklist floppy
diff --git a/build-tools/build_guest/rootfs/etc/modprobe.d/wrs_avp.conf b/build-tools/build_guest/rootfs/etc/modprobe.d/wrs_avp.conf
deleted file mode 100644
index cf8f9947..00000000
--- a/build-tools/build_guest/rootfs/etc/modprobe.d/wrs_avp.conf
+++ /dev/null
@@ -1 +0,0 @@
-options wrs_avp kthread_cpulist=0-7 kthread_policy=0
diff --git a/build-tools/build_guest/rootfs/etc/modules-load.d/wrs_avp.conf b/build-tools/build_guest/rootfs/etc/modules-load.d/wrs_avp.conf
deleted file mode 100644
index 988b8bff..00000000
--- a/build-tools/build_guest/rootfs/etc/modules-load.d/wrs_avp.conf
+++ /dev/null
@@ -1 +0,0 @@
-wrs_avp
diff --git a/build-tools/build_guest/rootfs/etc/sysconfig/network-scripts/ifcfg-eth0 b/build-tools/build_guest/rootfs/etc/sysconfig/network-scripts/ifcfg-eth0
deleted file mode 100644
index 73ac446c..00000000
--- a/build-tools/build_guest/rootfs/etc/sysconfig/network-scripts/ifcfg-eth0
+++ /dev/null
@@ -1,8 +0,0 @@
-DEVICE=eth0 
-BOOTPROTO=dhcp 
-ONBOOT=yes
-TYPE=Ethernet
-USERCTL=yes
-PEERDNS=yes
-IPV6INIT=no
-PERSISTENT_DHCLIENT=1
diff --git a/build-tools/build_guest/rootfs/etc/udev/rules.d/65-renumber-net.rules b/build-tools/build_guest/rootfs/etc/udev/rules.d/65-renumber-net.rules
deleted file mode 100644
index f5c68e36..00000000
--- a/build-tools/build_guest/rootfs/etc/udev/rules.d/65-renumber-net.rules
+++ /dev/null
@@ -1,4 +0,0 @@
-# Renames interfaces to be sequential ethX interface names regardless of interface type
-# This is required to avoid a kernel host patch that starts number at 1000 and to
-# override slot specific naming for non-kernel interfaces.
-ACTION=="add", SUBSYSTEM=="net", DRIVERS=="?*", ATTR{type}=="1", KERNEL=="eth?*" PROGRAM=="/usr/lib/udev/renumber_device", NAME="$result"
diff --git a/build-tools/build_guest/rootfs/usr/lib/udev/renumber_device b/build-tools/build_guest/rootfs/usr/lib/udev/renumber_device
deleted file mode 100755
index c9d184b5..00000000
--- a/build-tools/build_guest/rootfs/usr/lib/udev/renumber_device
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-# Renames interfaces to be sequential ethX interface names regardless of interface type
-# This is required to avoid a kernel host patch that starts number at 1000 and to
-# override slot specific naming for non-kernel interfaces.
-
-# The ifindex for the first interface that is not 'lo' will be 2.
-# Therefore adjust the numbering to start at 0 for eth0..ethN naming
-
-INDEX=$(($IFINDEX-2))
-echo "eth$INDEX"
-
-exit 0
diff --git a/build-tools/build_guest/rpm-install-list-rt.txt b/build-tools/build_guest/rpm-install-list-rt.txt
deleted file mode 100644
index 521d55c9..00000000
--- a/build-tools/build_guest/rpm-install-list-rt.txt
+++ /dev/null
@@ -1,294 +0,0 @@
-# list of standard packages to include in the guest image
-acl
-acpid
-audit
-audit-libs
-audit-libs-python
-authconfig
-basesystem
-bash
-bind-libs-lite
-bind-license
-binutils
-bridge-utils
-btrfs-progs
-bzip2-libs
-ca-certificates
-centos-logos
-centos-release
-checkpolicy
-chkconfig
-cloud-init
-coreutils
-cpio
-cracklib
-cracklib-dicts
-cronie
-cronie-anacron
-crontabs
-cryptsetup-libs
-curl
-cyrus-sasl-lib
-dbus
-dbus-glib
-dbus-libs
-dbus-python
-device-mapper
-device-mapper-libs
-dhclient
-dhcp-common
-dhcp-libs
-diffutils
-dmidecode
-dnsmasq
-dracut
-dracut-config-rescue
-dracut-network
-e2fsprogs
-e2fsprogs-libs
-elfutils-libelf
-elfutils-libs
-ethtool
-expat
-file
-file-libs
-filesystem
-findutils
-fipscheck
-fipscheck-lib
-freetype
-gawk
-gdbm
-gettext
-gettext-libs
-glib2
-glibc
-glibc-common
-glib-networking
-gmp
-gnupg2
-gnutls
-gobject-introspection
-gpgme
-grep
-groff-base
-grub2
-grub2-tools
-grubby
-gsettings-desktop-schemas
-gssproxy
-gzip
-hardlink
-hostname
-info
-initscripts
-iperf3
-iproute
-iptables
-iputils
-jansson
-jbigkit-libs
-json-c
-kbd
-kbd-legacy
-kbd-misc
-kernel-rt
-kernel-rt-tools
-kernel-rt-tools-libs
-kexec-tools
-keyutils
-keyutils-libs
-kmod
-kmod-libs
-kpartx
-krb5-libs
-less
-libacl
-libassuan
-libattr
-libbasicobjects
-libblkid
-libcap
-libcap-ng
-libcgroup
-libcollection
-libcom_err
-libcroco
-libcurl
-libdaemon
-libdb
-libdb-utils
-libedit
-libestr
-libevent
-libffi
-libgcc
-libgcrypt
-libgomp
-libgpg-error
-libgudev1
-libidn
-libini_config
-libjpeg-turbo
-libmnl
-libmodman
-libmount
-libndp
-libnetfilter_conntrack
-libnfnetlink
-libnfsidmap
-libnl3
-libnl3-cli
-libpath_utils
-libpcap
-libpipeline
-libproxy
-libpwquality
-libref_array
-libselinux
-libselinux-python
-libselinux-utils
-libsemanage
-libsemanage-python
-libsepol
-libsoup
-libss
-libssh2
-libstdc++
-libsysfs
-libtalloc
-libtasn1
-libteam
-libtevent
-libtiff
-libtirpc
-libunistring
-libuser
-libutempter
-libuuid
-libverto
-libverto-tevent
-libwebp
-libxml2
-libyaml
-logrotate
-lua
-lzo
-make
-man-db
-mariadb-libs
-microcode_ctl
-mozjs17
-ncurses
-ncurses-base
-ncurses-libs
-nettle
-net-tools
-newt
-newt-python
-nfs-utils
-nspr
-nss
-nss-softokn
-nss-softokn-freebl
-nss-sysinit
-nss-tools
-nss-util
-numactl-libs
-openssh
-openssh-clients
-openssh-server
-openssl
-openssl-libs
-os-prober
-p11-kit
-p11-kit-trust
-pam
-parted
-passwd
-pciutils
-pciutils-libs
-pcre
-pinentry
-pkgconfig
-policycoreutils
-policycoreutils-python
-polkit
-polkit-pkla-compat
-popt
-procps-ng
-pth
-pygobject3-base
-pygpgme
-pyliblzma
-python
-python-backports
-python-backports-ssl_match_hostname
-python-chardet
-python-configobj
-python-decorator
-python-iniparse
-python-IPy
-python-jsonpatch
-python-jsonpointer
-python-kitchen
-python-libs
-python-perf
-python-pillow
-python-prettytable
-python-pycurl
-python-pygments
-python-pyudev
-python-requests
-python2-six
-python-urlgrabber
-python-urllib3
-pyxattr
-PyYAML
-qrencode-libs
-quota
-quota-nls
-rdma
-readline
-rootfiles
-rpcbind
-rpm
-rpm-build-libs
-rpm-libs
-rpm-python
-rsync
-rsyslog
-sed
-rt-setup
-rtctl
-shadow-utils
-shared-mime-info
-slang
-snappy
-sqlite
-sudo
-systemd
-systemd-libs
-systemd-sysv
-sysvinit-tools
-tar
-tcpdump
-tcp_wrappers
-tcp_wrappers-libs
-teamd
-trousers
-tuned
-tzdata
-ustr
-util-linux
-vim-minimal
-virt-what
-wget
-which
-xz
-xz-libs
-yum
-yum-metadata-parser
-yum-plugin-fastestmirror
-yum-utils
-zlib
diff --git a/build-tools/build_guest/rpm-install-list.txt b/build-tools/build_guest/rpm-install-list.txt
deleted file mode 100644
index 61200f8b..00000000
--- a/build-tools/build_guest/rpm-install-list.txt
+++ /dev/null
@@ -1,291 +0,0 @@
-# list of standard packages to include in the guest image
-acl
-acpid
-audit
-audit-libs
-audit-libs-python
-authconfig
-basesystem
-bash
-bind-libs-lite
-bind-license
-binutils
-bridge-utils
-btrfs-progs
-bzip2-libs
-ca-certificates
-centos-logos
-centos-release
-checkpolicy
-chkconfig
-cloud-init
-coreutils
-cpio
-cracklib
-cracklib-dicts
-cronie
-cronie-anacron
-crontabs
-cryptsetup-libs
-curl
-cyrus-sasl-lib
-dbus
-dbus-glib
-dbus-libs
-dbus-python
-device-mapper
-device-mapper-libs
-dhclient
-dhcp-common
-dhcp-libs
-diffutils
-dmidecode
-dnsmasq
-dracut
-dracut-config-rescue
-dracut-network
-e2fsprogs
-e2fsprogs-libs
-elfutils-libelf
-elfutils-libs
-ethtool
-expat
-file
-file-libs
-filesystem
-findutils
-fipscheck
-fipscheck-lib
-freetype
-gawk
-gdbm
-gettext
-gettext-libs
-glib2
-glibc
-glibc-common
-glib-networking
-gmp
-gnupg2
-gnutls
-gobject-introspection
-gpgme
-grep
-groff-base
-grub2
-grub2-tools
-grubby
-gsettings-desktop-schemas
-gssproxy
-gzip
-hardlink
-hostname
-info
-initscripts
-iperf3
-iproute
-iptables
-iputils
-jansson
-jbigkit-libs
-json-c
-kbd
-kbd-legacy
-kbd-misc
-kernel
-kernel-tools
-kernel-tools-libs
-kexec-tools
-keyutils
-keyutils-libs
-kmod
-kmod-libs
-kpartx
-krb5-libs
-less
-libacl
-libassuan
-libattr
-libbasicobjects
-libblkid
-libcap
-libcap-ng
-libcgroup
-libcollection
-libcom_err
-libcroco
-libcurl
-libdaemon
-libdb
-libdb-utils
-libedit
-libestr
-libevent
-libffi
-libgcc
-libgcrypt
-libgomp
-libgpg-error
-libgudev1
-libidn
-libini_config
-libjpeg-turbo
-libmnl
-libmodman
-libmount
-libndp
-libnetfilter_conntrack
-libnfnetlink
-libnfsidmap
-libnl3
-libnl3-cli
-libpath_utils
-libpcap
-libpipeline
-libproxy
-libpwquality
-libref_array
-libselinux
-libselinux-python
-libselinux-utils
-libsemanage
-libsemanage-python
-libsepol
-libsoup
-libss
-libssh2
-libstdc++
-libsysfs
-libtalloc
-libtasn1
-libteam
-libtevent
-libtiff
-libtirpc
-libunistring
-libuser
-libutempter
-libuuid
-libverto
-libverto-tevent
-libwebp
-libxml2
-libyaml
-logrotate
-lua
-lzo
-make
-man-db
-mariadb-libs
-microcode_ctl
-mozjs17
-ncurses
-ncurses-base
-ncurses-libs
-nettle
-net-tools
-newt
-newt-python
-nfs-utils
-nspr
-nss
-nss-softokn
-nss-softokn-freebl
-nss-sysinit
-nss-tools
-nss-util
-numactl-libs
-openssh
-openssh-clients
-openssh-server
-openssl
-openssl-libs
-os-prober
-p11-kit
-p11-kit-trust
-pam
-parted
-passwd
-pciutils
-pciutils-libs
-pcre
-pinentry
-pkgconfig
-policycoreutils
-policycoreutils-python
-polkit
-polkit-pkla-compat
-popt
-procps-ng
-pth
-pygobject3-base
-pygpgme
-pyliblzma
-python
-python-backports
-python-backports-ssl_match_hostname
-python-chardet
-python-configobj
-python-decorator
-python-iniparse
-python-IPy
-python-jsonpatch
-python-jsonpointer
-python-kitchen
-python-libs
-python-perf
-python-pillow
-python-prettytable
-python-pycurl
-python-pygments
-python-pyudev
-python-requests
-python2-six
-python-urlgrabber
-python-urllib3
-pyxattr
-PyYAML
-qrencode-libs
-quota
-quota-nls
-rdma
-readline
-rootfiles
-rpcbind
-rpm
-rpm-build-libs
-rpm-libs
-rpm-python
-rsync
-sed
-setup
-shadow-utils
-shared-mime-info
-slang
-snappy
-sqlite
-sudo
-systemd
-systemd-libs
-systemd-sysv
-sysvinit-tools
-tar
-tcpdump
-tcp_wrappers
-tcp_wrappers-libs
-teamd
-trousers
-tzdata
-ustr
-util-linux
-vim-enhanced
-virt-what
-wget
-which
-xz
-xz-libs
-yum
-yum-metadata-parser
-yum-plugin-fastestmirror
-yum-utils
-zlib
diff --git a/build-tools/build_guest/rpm-remove-list.txt b/build-tools/build_guest/rpm-remove-list.txt
deleted file mode 100644
index 4c355b2e..00000000
--- a/build-tools/build_guest/rpm-remove-list.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-# list of packages to be excluded from guest image
-cpp
-gcc
-gcc-c++
-gdb
-linux-firmware
-rpm-build
diff --git a/build-tools/build_iso/anaconda-ks.cfg b/build-tools/build_iso/anaconda-ks.cfg
deleted file mode 100644
index 24d8d488..00000000
--- a/build-tools/build_iso/anaconda-ks.cfg
+++ /dev/null
@@ -1,40 +0,0 @@
-#version=DEVEL
-# System authorization information
-auth --enableshadow --passalgo=sha512
-# Use CDROM installation media
-cdrom
-# Use graphical install
-graphical
-# Run the Setup Agent on first boot
-firstboot --enable
-ignoredisk --only-use=sda
-# Keyboard layouts
-keyboard --vckeymap=us --xlayouts='us'
-# System language
-lang en_US.UTF-8
-
-# Network information
-network  --bootproto=dhcp --device=enp0s3 --onboot=off --ipv6=auto
-network  --bootproto=static --device=enp0s8 --ip=10.10.10.10 --netmask=255.255.255.0 --ipv6=auto --activate
-network --device=lo  --hostname=localhost.localdomain
-
-#Root password
-rootpw --lock
-# System timezone
-timezone America/New_York --isUtc
-user --groups=wheel --name=sysadmin --password=$6$Mazui8NX.w6C5I$UWNzOnui.vb3qOT3Qyw0I6hMLW0G02KfQGcCZTXdVv9GDZLUXHJVeGEN1/RAe.EOgz2cLkFkVaS8pvwBTFG1j/ --iscrypted --gecos="sysadmin"
-# System bootloader configuration
-bootloader --location=mbr --boot-drive=sda
-autopart --type=lvm
-# Partition clearing information
-clearpart --all --initlabel --drives=sda
-
-%packages
-@^minimal
-@core
-
-%end
-
-%addon com_redhat_kdump --disable --reserve-mb='auto'
-
-%end
diff --git a/build-tools/build_iso/cgts_deps.sh b/build-tools/build_iso/cgts_deps.sh
deleted file mode 100755
index ab45352d..00000000
--- a/build-tools/build_iso/cgts_deps.sh
+++ /dev/null
@@ -1,352 +0,0 @@
-#!/bin/env bash
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-# Here's the score, kids.  There are a few different places from which we can
-# get packages.  In priority order, they are:
-#
-# The CGTS packages we've built ourselves
-# The CGTS packages that Jenkins has built (coming soon to a script near you)
-# The CentOS packages in various repos
-#    - Base OS
-#    - OpenStack Repos
-# EPEL (Extra Packages for Enterprise Linux)
-#
-# This script can function in two ways:
-#   If you specify a filename, it assumes the file is a list of packages you
-#      want to install, or dependencies you want to meet.  It installs whatever
-#      is in the list into current directory.  Failure to find a dependency
-#      results in a return code of 1
-#
-#   If no file is specified, we generate a file ($DEPLISTFILE) of dependencies
-#      based on current directory
-#
-# We then continuously loop through generating new dependencies and installing
-#  them until either all dependencies are met, or we cannot install anymore
-#
-# We also log where dependencies were installed from into
-#   export/dist/report_deps.txt
-#
-
-CGTS_DEPS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-# Set REPOQUERY, REPOQUERY_SUB_COMMAND, REPOQUERY_RESOLVE and
-# REPOQUERY_WHATPROVIDES_DELIM for our build environment.
-source ${CGTS_DEPS_DIR}/../pkg-manager-utils.sh
-
-# This function generates a simple file of dependencies we're trying to resolve
-function generate_dep_list {
-    TMP_RPM_DB=$(mktemp -d $(pwd)/tmp_rpm_db_XXXXXX)
-    mkdir -p $TMP_RPM_DB
-    rpm --initdb --dbpath $TMP_RPM_DB
-    rpm --dbpath $TMP_RPM_DB --test -Uvh --replacefiles '*.rpm' > $DEPLISTFILE_NEW 2>&1
-    cat $DEPLISTFILE_NEW >> $DEPDETAILLISTFILE
-    cat $DEPLISTFILE_NEW \
-        | grep -v   -e "error:" -e "warning:" -e "Preparing..." \
-                    -e "Verifying..." -e "installing package" \
-        | sed -e "s/ is needed by.*$//" -e "s/ [<=>].*$//" \
-        | sort -u > $DEPLISTFILE
-    \rm -rf $TMP_RPM_DB
-}
-
-join_array() {
-    local IFS="$1"
-    shift
-    echo "$*"
-}
-
-# Takes a list of requirements (either explcit package name, or capabilities
-# to provide) and install packages to meet those dependancies
-#
-# We take the list of requirements and first try to look them up based on
-# package name.  If we can't find a package with the name of the requirement,
-# we use --whatprovides to complete the lookup.
-#
-# The reason for this initial name-based attempt is that a couple of funky
-# packages (notably -devel packages) have "Provides:" capabilities which
-# conflict with named packages.  So if explictly say we want "xyz" then we'll
-# install the "xyz" package, rather than "something-devel" which has "xyz"
-# capabilities.
-function install_deps {
-    local DEP_LIST=""
-    local DEP_LIST_ARRAY=()
-    local DEP_LIST_FILE="$1"
-
-    # Temporary files are used in a few different ways
-    # Here we essenitally create variable aliases to make it easier to read
-    # the script
-    local UNSORTED_PACKAGES=$TMPFILE
-    local SORTED_PACKAGES=$TMPFILE1
-    local UNRESOLVED_PACKAGES=$TMPFILE2
-
-    rm -f $UNSORTED_PACKAGES
-
-    while read DEP
-    do
-        DEP_LIST+=" '${DEP}'"
-    done < $DEP_LIST_FILE
-
-    echo "Debug: List of deps to resolve: ${DEP_LIST}"
-
-    if [ -z "${DEP_LIST}" ]; then
-        return 0
-    fi
-
-    # go through each repo and convert deps to packages based on package name
-    for REPOID in `grep  '^[[].*[]]$' $YUM | grep -v '[[]main[]]' | awk -F '[][]' '{print $2 }'`; do
-        echo "TMPDIR=${TMP_DIR}"\
-             "${REPOQUERY} --config=${YUM} --repoid=$REPOID"\
-             "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-             "${DEP_LIST} --qf='%{name}'"
-
-        TMPDIR=${TMP_DIR} \
-            ${REPOQUERY} --config=${YUM} --repoid=$REPOID \
-            ${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch \
-            --qf='%{name}' ${DEP_LIST} \
-            | sed "s/kernel-debug/kernel/g" >> $UNSORTED_PACKAGES
-
-        \rm -rf $TMP_DIR/yum-$USER-*
-    done
-    sort $UNSORTED_PACKAGES -u > $SORTED_PACKAGES
-
-    # figure out any dependancies which could not be resolved based on
-    # package name.  We use --whatpovides to deal with this
-    #
-    # First, we build a new DEP_LIST based on what was NOT found in
-    # search-by-name attempt
-    sort $DEP_LIST_FILE -u > $TMPFILE
-    comm -2 -3 $TMPFILE $SORTED_PACKAGES > $UNRESOLVED_PACKAGES
-
-    # If there are any requirements not resolved, look up the packages with
-    # --whatprovides
-    if [ -s $UNRESOLVED_PACKAGES ]; then
-        DEP_LIST_ARRAY=()
-        \cp $SORTED_PACKAGES $UNSORTED_PACKAGES
-        while read DEP
-        do
-            DEP_LIST_ARRAY+=( "${DEP}" )
-        done < $UNRESOLVED_PACKAGES
-
-        if [ "${REPOQUERY_WHATPROVIDES_DELIM}" != " " ]; then
-            DEP_LIST_ARRAY=( "$(join_array "${REPOQUERY_WHATPROVIDES_DELIM}" "${DEP_LIST_ARRAY[@]}" )" )
-        fi
-
-        if [ ${#DEP_LIST_ARRAY[@]} -gt 0 ]; then
-
-            for REPOID in `grep  '^[[].*[]]$' $YUM | grep -v '[[]main[]]' | awk -F '[][]' '{print $2 }'`; do
-                echo "TMPDIR=${TMP_DIR}"\
-                     "${REPOQUERY} --config=${YUM} --repoid=${REPOID}"\
-                     "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-                     "--qf='%{name}' --whatprovides ${DEP_LIST_ARRAY[@]}"
-
-                TMPDIR=${TMP_DIR} \
-                    ${REPOQUERY} --config=${YUM} --repoid=${REPOID} \
-                    ${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch \
-                    --qf='%{name}' --whatprovides ${DEP_LIST_ARRAY[@]} \
-                    | sed "s/kernel-debug/kernel/g" >> $UNSORTED_PACKAGES
-
-                \rm -rf $TMP_DIR/yum-$USER-*
-            done
-        fi
-
-        sort -u $UNSORTED_PACKAGES > $SORTED_PACKAGES
-    fi
-
-    # clean up
-    \rm -f $UNSORTED_PACKAGES $UNRESOLVED_PACKAGES
-
-    # We now have, in SORTED_PACKAGES, a list of all packages that we need to install
-    # to meet our dependancies
-    DEP_LIST=" "
-    while read DEP
-    do
-        DEP_LIST+="${DEP} "
-    done < $SORTED_PACKAGES
-    rm $SORTED_PACKAGES
-
-    # go through each repo and install packages
-    local TARGETS="${DEP_LIST}"
-    echo "Debug: Resolved list of deps to install: ${TARGETS}"
-    local UNRESOLVED
-    for REPOID in `grep  '^[[].*[]]$' $YUM | grep -v '[[]main[]]' | awk -F '[][]' '{print $2 }'`; do
-        UNRESOLVED="$TARGETS"
-
-        if [[ ! -z "${TARGETS// }" ]]; then
-            REPO_PATH=$(cat $YUM | sed -n "/^\[$REPOID\]\$/,\$p" | grep '^baseurl=' | head -n 1 | awk -F 'file://' '{print $2}' | sed 's:/$::')
-
-            >&2 echo "TMPDIR=${TMP_DIR}"\
-                    "${REPOQUERY} --config=${YUM} --repoid=${REPOID}"\
-                    "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-                    "--qf='%{name} %{name}-%{version}-%{release}.%{arch}.rpm %{relativepath}'"\
-                    "${REPOQUERY_RESOLVE} ${TARGETS}"
-
-            TMPDIR=${TMP_DIR} \
-                ${REPOQUERY} --config=${YUM} --repoid=${REPOID} \
-                ${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch \
-                --qf="%{name} %{name}-%{version}-%{release}.%{arch}.rpm %{relativepath}" \
-                ${REPOQUERY_RESOLVE} ${TARGETS} \
-                | sort -r -V > $TMPFILE
-
-            \rm -rf $TMP_DIR/yum-$USER-*
-
-            while read STR
-            do
-                >&2 echo "STR=$STR"
-                if [ "x$STR" == "x" ]; then
-                    continue
-                fi
-
-                PKG=`echo $STR | cut -d " " -f 1`
-                PKG_FILE=`echo $STR | cut -d " " -f 2`
-                PKG_REL_PATH=`echo $STR | cut -d " " -f 3`
-                PKG_PATH="${REPO_PATH}/${PKG_REL_PATH}"
-
-                >&2 echo "Installing PKG=$PKG PKG_FILE=$PKG_FILE PKG_REL_PATH=$PKG_REL_PATH PKG_PATH=$PKG_PATH from repo $REPOID"
-                cp $PKG_PATH .
-                if [ $? -ne 0 ]; then
-                    >&2 echo "  Here's what I have to work with..."
-                    >&2 echo "  TMPDIR=${TMP_DIR}"\
-                            "${REPOQUERY} --config=${YUM} --repoid=${REPOID}"\
-                            "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-                            "--qf=\"%{name} %{name}-%{version}-%{release}.%{arch}.rpm %{relativepath}\""\
-                            "${REPOQUERY_RESOLVE} ${PKG}"
-                    >&2 echo "  PKG=$PKG PKG_FILE=$PKG_FILE REPO_PATH=$REPO_PATH PKG_REL_PATH=$PKG_REL_PATH PKG_PATH=$PKG_PATH"
-                fi
-
-                echo $UNRESOLVED | grep $PKG >> /dev/null
-                if [ $? -eq 0 ]; then
-                    echo "$PKG found in $REPOID as $PKG" >> $BUILT_REPORT
-                    echo "$PKG_PATH" >> $BUILT_REPORT
-                    UNRESOLVED=$(echo "$UNRESOLVED" | sed "s# $PKG # #g")
-                else
-                    echo "$PKG satisfies unknown target in $REPOID" >> $BUILT_REPORT
-                    echo "  but it doesn't match targets, $UNRESOLVED" >> $BUILT_REPORT
-                    echo "  path $PKG_PATH" >> $BUILT_REPORT
-                    FOUND_UNKNOWN=1
-                fi
-            done < $TMPFILE
-            \rm -rf $TMP_DIR/yum-$USER-*
-            TARGETS="$UNRESOLVED"
-        fi
-    done
-    >&2 echo "Debug: Packages still unresolved: $UNRESOLVED"
-    echo "Debug: Packages still unresolved: $UNRESOLVED" >> $WARNINGS_REPORT
-    echo "Debug: Packages still unresolved: $UNRESOLVED" >> $BUILT_REPORT
-    >&2 echo ""
-}
-
-function check_all_explicit_deps_installed {
-
-    PKGS_TO_CHECK=" "
-    while read PKG_TO_ADD
-    do
-        PKGS_TO_CHECK="$PKGS_TO_CHECK ${PKG_TO_ADD}"
-    done < $DEPLISTFILE
-    rpm -qp $MY_WORKSPACE/export/dist/isolinux/Packages/*.rpm --qf="%{name}\n" --nosignature > $TMPFILE
-
-    while read INSTALLED_PACKAGE
-    do
-        echo $PKGS_TO_CHECK | grep -q "${INSTALLED_PACKAGE}"
-        if [ $? -eq 0 ]; then
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/^${INSTALLED_PACKAGE} //"`
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/ ${INSTALLED_PACKAGE} / /"`
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/ ${INSTALLED_PACKAGE}\$//"`
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/^${INSTALLED_PACKAGE}\$//"`
-        fi
-    done < $TMPFILE
-
-    # Strip leading spaces.  Don't want isomething like ' ' to trigger a failure
-    PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/^[ ]*//"`
-    if [ -z "$PKGS_TO_CHECK" ]; then
-        >&2 echo "All explicitly specified packages resolved!"
-    else
-        >&2 echo "Could not resolve packages: $PKGS_TO_CHECK"
-        return 1
-    fi
-    return 0
-}
-
-ATTEMPTED=0
-DISCOVERED=0
-OUTPUT_DIR=$MY_WORKSPACE/export
-TMP_DIR=$MY_WORKSPACE/tmp
-YUM=$OUTPUT_DIR/yum.conf
-DEPLISTFILE=$OUTPUT_DIR/deps.txt
-DEPLISTFILE_NEW=$OUTPUT_DIR/deps_new.txt
-DEPDETAILLISTFILE=$OUTPUT_DIR/deps_detail.txt
-
-BUILT_REPORT=$OUTPUT_DIR/local.txt
-WARNINGS_REPORT=$OUTPUT_DIR/warnings.txt
-LAST_TEST=$OUTPUT_DIR/last_test.txt
-TMPFILE=$OUTPUT_DIR/cgts_deps_tmp.txt
-TMPFILE1=$OUTPUT_DIR/cgts_deps_tmp1.txt
-TMPFILE2=$OUTPUT_DIR/cgts_deps_tmp2.txt
-
-touch "$BUILT_REPORT"
-touch "$WARNINGS_REPORT"
-
-for i in "$@"
-do
-case $i in
-    -d=*|--deps=*)
-    DEPS="${i#*=}"
-    shift # past argument=value
-    ;;
-esac
-done
-
-mkdir -p $TMP_DIR
-
-rm -f "$DEPDETAILLISTFILE"
-# FIRST PASS we are being given a list of REQUIRED dependencies
-if [ "${DEPS}x" != "x" ]; then
-    cat $DEPS | grep -v "^#" | sed '/^\s*$/d' > $DEPLISTFILE
-    install_deps $DEPLISTFILE
-    if [ $? -ne 0 ]; then
-        exit 1
-    fi
-fi
-
-# check that we resolved them all
-check_all_explicit_deps_installed
-if [ $? -ne 0 ]; then
-    >&2 echo "Error -- could not install all explicitly listed packages"
-    exit 1
-fi
-
-ALL_RESOLVED=0
-
-while [ $ALL_RESOLVED -eq 0 ]; do
-    cp $DEPLISTFILE $DEPLISTFILE.old
-    generate_dep_list
-    if [ ! -s $DEPLISTFILE ]; then
-        # no more dependencies!
-        ALL_RESOLVED=1
-    else
-        DIFFLINES=`diff $DEPLISTFILE.old $DEPLISTFILE | wc -l`
-        if [ $DIFFLINES -eq 0 ]; then
-            >&2 echo "Warning: Infinite loop detected in dependency resolution.  See $DEPLISTFILE for details -- exiting"
-            >&2 echo "These RPMS had problems (likely version conflicts)"
-            >&2 cat  $DEPLISTFILE
-
-            echo "Warning: Infinite loop detected in dependency resolution See $DEPLISTFILE for details -- exiting" >> $WARNINGS_REPORT
-            echo "These RPMS had problems (likely version conflicts)" >> $WARNINGS_REPORT
-            cat  $DEPLISTFILE >> $WARNINGS_REPORT
-
-            date > $LAST_TEST
-
-            rm -f $DEPLISTFILE.old
-            exit 1 # nothing fixed
-        fi
-        install_deps $DEPLISTFILE
-        if [ $? -ne 0 ]; then
-            exit 1
-        fi
-    fi
-done
-
-exit 0
diff --git a/build-tools/build_iso/comps.xml.gz b/build-tools/build_iso/comps.xml.gz
deleted file mode 100644
index da98749f35ae044ba71048c42d9f0281429e984a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 160726
zcmV(xK<K|8iwFqYfC5(l17mM(aC0trZEOJSTw7NgSC)S7UvY+a*}2S`>7M1LSA6N!
zy(XQc$If)RU$RP4A(d6CD%V8@o{%sGn-JU+%h+xwn6?l?7NQW6E!$W;ki3h3aMlp)
z#Nhuhd+(zXU}H&D>Y1lm3l-<G_db_8m;IeOd!PEva8hL{*|Zd0`}M&4C*B)ivKG;!
ziWd9zz~>i!8~WwIZw7yH>YvVjdgj8PKRd@FdeX4iXP=+``1fbnK<MPjGqPrXdj90e
zvlq^?GnA=lL;A^+=RO%=14MlyYDWh^21N#c!Pu#osXGRSAgn|OrD#;Kp?paVMdg&N
zo;nGMB3F}=^5949e0CP|bt4Ido;pdH2XqXY@^7ky6_>1mL7t!B`5e!0@+B9a`7B?W
zffUB`*Lc1_5qQqSvy-T>c34#YO;HOJU*SuWJYV7YBF~p8EacC3mi*=KKl$Gh9)TV>
zs~hSMQ;88~m^w>sJx}QuJF!INfx*8x)>h6QRh39wX6H2fvJy$Cjuz|0Cu32vC7GtK
zsydURN`zikC!Qe_c(#`5Wg0TRPG&iVp>8c>SW-;66L-vkZY$P!afufjyy()iK+g=L
zj7?t5QoP<tPFw*wxe1=j@!TfQx%ABP#TiIpJXdSQa}A#BC`XZ?HHsx#BVh}$8@xf=
zk<%U+JfBijXgjdPVv3{LN=om<auEvKNq@VOF8}!K7Ce8xHU09=(@q=-9Kw|+eElk~
z+~w;t^i1<gjaOV=*<=*j<drP1)On=<bRDJX1B3gIZoYEI*emzmo_ntoaZ-h%s}sDs
z#H*XUn&s6puQqsfmGSBVuhw|A!mCAIoAI7yUc1Vx1q!dhv!igc_4?-M6<vo{w!jxB
z@x|dQe8O`To-6WPnG!QR*Kx+^D6KTXOF3TJ<RzD$S-vs@DU6run1qoP2td5l$}cr|
z=}{-S2D~p`sPV!QUmoLyEThOKgm|IB3mpfRq2er#N_IlOEaNnenNm`}ENM!|`C-^!
zp8gM-l(Lr6)s*55$sT5=33ID?v)O2F!tYu0d9%KqY1W$0w!dmV-=1#P*min*3=*pF
zKhvx<T?SFN`Iw$-%?2bsZFX#wl^PiQvu(?!s>CEyW{#bbVU<xjuQE1vV>!!LGrT;{
zSJS+l<K+rpongFujh9_Ys_}Azh-UCvE<>QBOjz-_JHg#NcMIGt^0hRh_zHJ7xm)9F
z<CwQLLlm2xL^+98w^f@ibwQ(9*}lcLZ$Ym?M{SR7k2g1<+nz&a9lNYSUDkju>(X7L
z@h{K7Hu3YFE4$b4?{19k&i=Iv`Gr3L`NGU@?iQ^L8%M~avK28E1DB@58*B7nV`Xt!
zih@SKBrVERC2UHjB3o=oH(A(GR8$R^Zpd25(j7A*GYQshB_g39>%^&(hYNVC$m4e?
zJ+-yW5}Fd%Q`!iVSX`A-%J(xO9ZCu7rr5+R*|Mb+Brj-ND^ReiB&5jJijPt|ZE{xV
zchqv`K*K|)m9VPlc3e)#8XI-Y!;~xqLO;bmI8S{L`pBTVU}3W6qdK^Ep3@>ySRR$4
zx^*x`p~FX5SX4odNES0BQzGq4Q6-a_9+8j~2%|EKY^_I?7)<f7BAd4EqXxJWpVLD2
zR?!Agvh5Wb-zr>kgz#v`jT)*3eZV5-)-rS*6mn!h&`4oor1+?n*x|q7#UgC%A=t+G
z!)e^;AI?CSd7Hb4uVNGfpa+zJ4zf!^7a#=zw1Ec0988+$4<}JocsRz6Q6Rk31MLCg
zPl#oKN<-NbL^%ne3eikraiVx6gy$I4+(%o576O}8`mpj*r;y7Rr$rpLxdM?CTN$}k
z%A2Dns6EI#`KH!#kX)hUS=Ot*1Mx?Eag3?}`H$OqkZ}`Ig-D=;t+nLUb&{TAJ{pe-
z(8sXG$gsvR8L)$mQ+yPN6lTy=gjxT#f<$?0dqN;Rj8nxMdV>GZBITn-q+ex<JL4NW
z<8wRX>pPj}7|uM|$^4yCiaVJ#N_o69z61m>mZo2>UVC}70LoZfF%w$Z&iL(}@$sGU
zF{DoycE*=?GP#}cTNnU2>7||Yot?2v@0Am(T$$9jEU9w25d3&0gHMcuM%SR|Bhc$6
zNS&J@1#gvC=6Ge@OMxeBcJL21KbMlq6b2QrBJz6I7%Po7`2tBxw+d^?!79y(2*{q|
zm6BIim*li)eY_>oLT~3@-<)FmSHIf7o@4vj`>$_)wU;mLZ;b7KTiu_y&0f_?`;W5w
z-(F$+_pZFo-ea%o?(1<+Z)LCNa(gp3*`7Pg_P*T!p1t~4d-Ej}@!iDU^wi#T&PR=e
zs{74>I$M1Jf&U(x3Gr&3*QR;3N*NFaoux(Pf+|dTbG&+oQtt5Ty5Iw%9A(eo6Sd|w
zdS)pFo@q}}E)eA=Mbm;Gp6p$Ucg;(x0|&&Pkj}k83U`_^)+qps3aIC`DPApkm0j~#
zTNHl!hqU!?NF#?O9T&K1T8zX30}IY##arDf-pVwIs}L@HNWpV=DX=P5p%N`Wpw=((
z+yh#wvfi4O6BK@+0mD>`EVOInqj6op!x69m@ke}R%)=9pAp??Ti3+;BROTz`!_3!E
zDn_7-N<|6*=R9AT=A}9OL-GY)+VBcNcv5K91F6eY?gQe+&?GOxOhiT!1+*Agq`Y;M
zyqN2~<pSP(!wW@TfY(~6^McElr+ERYUbsQY6#;zs@+2=j0xH5i(9%L4GT`4e;%X2h
z#w)xq#X3PIh(F=WQ@pSMJP?_H(m=Xk1p+9iMA9aOOkTJO1^R2&5#*&wu^%#9>z5M>
zQ`%Y?M4~z~_0h}9ML=Ydr;lD#0xCT%*mefA!L21pjq5`DdK<+l5z^V^;}k^AvQ0<v
zG(_AhNy?B8J1eM%pdCs+s?-$?0)DB(?-@KBlm%#pA;P%=aPSEbPP67?2(A-9fFc2n
z7~rI5+q1`FN7megggTKwLdXQ+CjdK{AOI*e4S;I8xxq+Q6@D8&nk0ZEfX7W<USk9j
z0a8{VROIDVUS1~jSR^DVfF>Zxb-p_FZlKvp0hwza^u%1|53e$hWD1KHl{hGVhN{{s
zV2v`$A~lE7P!tf}@ybUIKb=BI_#48&R}l=a!M5er`PwvhT>`)mo)O>}GSdi-TNydR
z&JFHXL>8m?rk7o&aE7~giEW;{**47X4R$jk8RPDK%Fbd*#8JjFFlkl9A$f|sC1P}`
z{4qi6qa)sd<6-=}F!aV@FwzKp{Pw7J0pG*8&oo__6oC6-Qoz%j6e9Ov)E|NWV#GI^
z>ogHIppXWN7G<mf|6?e>wmnIc;W-Gv@}XKF?kP?cJW~id9a_f=puYb2^y?SjUD<v1
zba!Ly#nT(RPj5ov%fj^T)7h7IuN*#qcyWJnH&@-Ay)JThXIFhGv8^KtEV6MG%6s`7
z4>4cdZ0v5#?tVSL3;OimH(pF$YZbqN#UGA}Q;nY%3P0ap`Dto$ck($hK7IJkN(@Pk
zY7Y)cswE?Vh>A?dlFgK2!8$5?1c#FZks(DAQcPAAtHpMZX3CeCJz~h9-$)v47rHqz
zFbHy_kg9~e&lv}_y|Ft{#YTQBpn$NVIkut(X4iGg4$3>K7#{@Wjf5mq4qEAmV+3X$
zF&!&pNtPN=Kr{^evEVOU{$R+$!`+|_kkz4n-glcFS^(joQb0#>aN30+MGE00lFgv9
zh7Bp`2plqHIR-Te$_~>%AZsjX1m)E9B$jF=Y~A=Uun@-%JO*N*Hp)^`Hs9}GfT21u
zMYBR-DR@_=hJv;vo|GaHNsahtv~43Av`|O03|S4jfWUqw!FCh0$rq)R6ng)}znpk4
zgvjG!G|``+fBi=Z`epxu98Ix(b0wm3=(27mEFcOx1VKj3l#I9%3Hn+<LLZwraBH%S
ztq3l_P?CKy_g8%@@LPv+rAW}pgv^TG2eYjwl}O040X_R)MM!`hbmIf=u$oYUE<?Z`
zku(7LrqqvwpxaoBS&Q|-@0fkO_ox|kyaRJAVvZO=)du9JO*|owm`XoA5OaVv=q?QW
zaVI9*YB=!z2_lp*sBVF0J_@k=cn?}113=SlWoRTgzcJKD#Z4;}3yOVkrG?!>bB6m6
zpgVocT<Ex<sy}c$R=AIvJ5E1K-WQV75A)?dm=#SnrDPwA?H9>@>TdKgTE8$M+Q&XM
zY^gyP%cNu~QZy)9PU0L7%4-Y-Z89ihAxjGk@iYf;i5+xlLnT^GDj~zvQwm<r2`I;q
z3?(RQ#0;uR8!~L_9~nj{Vn|vN7C9@Jh(1a`H*9}SXB>Z!5;FeGU~=NH$48Zb%ok+K
z4t=Ia`{NFs(P4WAQvYIf&@F>bT0;-492vT0$4uF}6!;Z7swLyGsNT=9s4)`L!x!a<
z9rT3dvyngA{~Wk*JC0{#{V-dy?HIPMtN!QDB|DTHffo>jQICfG;~?NWc<lkWKMeh0
z<oq8$4#fHC+0$nNF@7d$9Hd-RmZG4)1zjR730ML2CS)xdbbOyb`*G0HFR6+hxaziK
z!nh2P?%lfg7gW@UWy{H+s-PVXs#4g24;$==@#SSxGK}DRvK{o4#L<*XjvRC`1MXp*
zC;{1rbu$(ke*b+`N$jAfEyJVlVr-{)AKgx!Y+DVzWj~}z5glZPT1(>_)<dmac0uHJ
zwi;?v^L(A>(>#BVFQv))W|LCNd?~~8PkONHIV#EJ`R~YT=N`{DKukB=olmwFEZfvC
z;+064VCvQa^q9juq226wJi5@IWHT`;JF$-T0(cAgcX()&RPa(ST^Q-5B)mKPDSbNX
ziCkRjo%Kw0vUI1nKzC;s#3kK2Wh_zZba%_0S-cvC_q(#<5-eTbYF$vh$8#H$=XSNf
zu}ufB)+l&^w3l|^4bk@7-5I|^S4AIov%onR50z`YQl!h1F0VXj-+t_7bki2FH+$t(
zt<;m*P2tdW!D(?Lkyqz?x_;Qz@&>Q#dEJlCbi4cO6kno?dz0O4ZVvhdZEPC6v_!Yp
z-n`b<-NfdGceN}__s=qPJq)T-sPko)7xLW<Z9b9A(eLj>M-_=hRjD=aaG{cV8WpYE
zTfe1w)S5<mPn>kg>h{})BHPn#_7mMqV-9)|Z~wG!7j3pL1NCGFQ=r>9*Xh2_0>eu`
zc(G^c;D*my$5Bd`XRaP=@zP5_w63DGo3i_Oad+Zz55_K?I1b$hIoVG7Kfe&-*m0=g
zkP`EMp~5zYtPtG}@PDx(o(NcKByQ^3h<`RbK_`^r{|<$mQX+CFNvc#R2`@x3Bs=b3
zPSTVV{|Gy(TSHbbXcaZ3h+~IZAM1i&{_xK^q=a?dPR9HnM+`-DIYc|Re^$kmFH5TG
zzmH=>)&~I#jhRmPgJ4`C=qCRk<gJLMgcMD&6-)_OYc-YhU+H92iYEOdrw@PiUa(5G
zIqANJfA+W%ptgkbLSk_Wc_S)V1#FxH{<)IQuz$GXYo_Xku&o>z)Ghoh+J-4oG^%U;
zK^{CQ0LUFt74n2|6!Dy<Om?P~+09KtiF^uVLp*<#=Znm9Ku{r|GTWmogO(s5TqVBa
z#VyVIc8>6m6pFy=FIx+$9@kT7H_8%Sy9mLQ=+991{zM!lsPNi8GiA$=)M!U*e%V3!
zVcAOPh9ZT+@~CX;T|t*91k11_Cnd)uGung0gc2DjX32!H$ULh_Va2%WA+e4fC}3BK
zFHVutKf!ZF=2;N7tpU3k+M~CGBfSJTH-fG23{nxQdF@u1m;hb<B#hrc`@764+3t;_
z`5uIY6&|Ln?~G4%_naV%)mF)hO&HxxAb?w+VP0GqwPko^9-`fF+@8C(KR3Z%r>ny1
zZ2wmM_4SS>-~!H-j#z&k@|wWB)Cw7j`Ei~AuX5sDuX46mp9uxBI;sc&mm!m)tY_MU
zR!1UIA#gVfNfhlUR{**a+4yA9zGr!j7odI^5hgx>?jWzT5a_jk7Xt<-m%>IRL$dVC
zBawIyIL;fPr@Ia`f^YZfmVRw}tXUz;m?wA}AFW~t0CjVRaDFS7kj=Pk(=s6A1rn?K
z+9WJliiGln)yiGII!R_Cn}<zEm}h7*-vWd!tCcCTcWEF#M8wzwL=?6as{{tKWR`(8
zAz4Zl))yHx@mT99icsQRw2s)miJim9pG3R!Jlh8}X4nLpuhGebuzTnMJnrTue|+|Q
zxBT}PQ;%OhT<rlo?q=?I9FN}QIBZPNT6+j1kTV3>%r!Y;qdNpN%W5r2Ok8{>=Lu0A
zGex=|*9h;}m+#VU=h8m6;)Pv0UtF3(A3{r$;;=D;8|Kn9&(DcWpkJg7v<Taz*gVnI
z5IcVFcZF!mMW&LFd%RZA<($-)&0R+=qD~2i8joS9yW7rN^wEA|W*xMPyerCJbYd$S
zyLy<Rgsl_R@Cs`ay;C`1SOp9T=^{0ExE*hv6Z`DI&Pvn&_hD~tNyM&UCAOBC);@<v
z|0Bi^ceYk68)msCvC+etq?q;SyacakY%N5;{~Nj}5i~yilZcfjMBgjO^T>b7W?0dp
z=y)Wl*i2S5IoaBnLg&SDgDy*u^sHk>;zL^-ano~Ca@zOubKK{ENNYRvtW06ylT1s7
z_Bt9hUWw}FVWpRiu(%YFahU|wj$~MJ%rO;!w?5sIv_3J2Dzv<fh_x5QZ{7E5AUoz!
zjTpU|455n>@=AjB5dKUKD13V)c~gb1drnAd5NLnESQP$2?$HO11xl(N<A~&)D)^Bv
zAxB(}yjebQVzNtaN`x;IEaPuL=Gy~FmQ3gCd~uAut>f1{tN5Dm{ZSWQMY7_I-%}jt
znP-Gw5cw@nA#X~n5KqCA@rHwvWAKC@k0PKIeQwK|rJxfXoV5poMN-9BwY{<GK@W?;
zXc?9-IY4AdYDyBe)@XhW*x%3OL70Lw7kJh7kOip<#B^1Kap#$Fx5uq{0BD#TWwIql
zoVG_3k?z-j$uYR6MxUu}kSCIdFTP%U`RMLWdhR&N|Jp6j|FrNG`X<Rt@1*B<GHG;e
z@)mMB^y$l=|B_CC3jSrtg~$};U8T=s59n)Ip08ggl$CEe4Ozd+$N@=)zNZN%Cc-5N
z2wr;2o5>+BB%Nw7(ZNMZjpQ7WTOy|_6!!`Mx-MlL{{_&e|C1Jw+`n>fuX1I7?grbx
z|JVJiGq1C^U#;I~BA1+>Ox=Ec>l!B4UC$HB8|btBP0^pXlTMJdj-MClOCot35uQUT
zyf%60lca#I)dybEyhyrB4oQUPlN>r4q11O3)^=Iaa+>lr$w~WilyW?NhKy_E$t6pl
zN+0|Wd)L<6#*wA}N+U1p0U}^06YS2u3?>&K2sVQR@I38H&?1{uhh#U?-IO9d>3*^0
z*p4H~k`>FcXo|W~v~0<gVoMKlUNhDLL7d%}_Sll0vHTyl&Z(-t@Y3DY)D8yN1rnKT
z)~UW#RiAUdKHt{}k_z*m)Cb(2b(Im5R4MV21Q0f$+KZIk#93LF91au?wBDjWZ%cLE
zGuD(lm$)r(q%dlfCV_rEa4SrI5|#(3r%)u{cM;qzIwqTx)kLB^CgevcN_v6ut*qPY
z`}HIz&2tYSSaS>llqR_fWI_Sr5}+ud$TT^L^1Qt=V6V)RO+Mz5CLL_yrX`mVr9S&X
zHZ9mIgRoVU+r$HQW1=CEQ5C?ee;9l`lIT-_&csw7&q{spi0V-c;&9d{Q6yFB>CNa`
zFY-|~{47p<;)BjxxSW!#``k_b*rcseavyn`iU-^Q2)s9q1Jn+b!2$VD?!#|KUo&2Y
z8f@NcOlCYh8j_9%*oJNB9r;lPfC2(oCYY(;?&#vt2upzg`-#A*Kst%WFT}CPSiNZ}
zb+|(=n`KF|W@ZbPED>)yD*-`GldKbSNOW?0)5K1$Yor^(Oe^2e*Ab+MnJW6jHqTB?
zIW#BD$xp)j^zyfm7NgKD{KmLe$W~&s<YQg&(YUNcO`o-V^+K43rFojI#wk@8^~zuS
zC%t4Ul$NE2DY#`cEh9zBOdan^J!?vnojGhTMWj&Ktm~mkzOU|GkqTPGp<eOi1<7Qb
ze6ok+D?WA<heqYe7IM&5ahh$BY3kCW{I!3UbaJ13uey{e-dP9}GkW5Y(z-Cl+kG@+
z+x<P%oLPG`Ei`Lg7M7Fm?z}p<N&NHU@hVMbp3Gl5zOZ;aTRz_1d3A7uc;<Bb%ei4E
z&q-f=iO<6FYyZ-#v3oQX`r_dp>~gmJqE`B~e+%}9+xh->!uR)gf4>GlJ8-BNI!hiG
zg=}17Ly?=jeGvo7r3-HhDEB=3Q5ec?k}{5s<2K12wu9gbcGrO2nxWUkgWZN-KuCI>
zZ<5Cn0B`#sO=tB%aa$OGd%jt|N*uxUrJuxsTVGNq46;4nJaH2Gc5PjTBMxNirY_n8
zYg^_PB0{z_(ryo_tqY5*5nr~A5;(Sx)cPW{SOBdX^n^jP=P|B`1I?aq@c(1LvtDN)
z4m=ya{0P9a&7I!3cx=6+7z)NNx|hP|(U3q$9D8-#mB`qu<8ZXZUeD_(s|U0KEL}>F
zr?eqw)jmb)1t>t(QicqqmkFz2DF0-}##={SFsU>IP!t8kU}HbkV`+w#I>1y3{sMUF
zfMo~Nsx_p_pNz|Q84$MI1PCLK@HL8jK|8WP(nuUtU3JxE-~ethWDIr%hgGpl%lu#h
zF}=F;Ej2ygwt&Byd%mS>i2XT7B6{M?0r@|8?%*vslg61bg)a^_5)LZaD|O31ERwIz
z49buh)GG*9I(q^1@juv}OtMG<{`hg1L6-QBz5RE5C?hzN=)xw#Y6;H%Xj+J$nrujB
z!F0SfOd#QTDST-Vp}hqc?Grt%CR8O$0mrPnFB2;qTVjgvty{MYrEX;s#u|$pE38sb
zZ;a!EYh3MMAB7PvA(}|HP&{!CKf8iRKDW`q5~sKp0<kFZ4i3nEslGzBIbunb7}nTs
zNfQ)35C$7Zk$#&&f9oi0P(}cdCbmm7rG1+h7R@<X>#-(q;*<*{Vk*1D^#~kJ@TF!r
zW6i!ZVHejZa*2-A817%>Y|t}yhNPi;MhO`-bz?xTf8S9MS9Euz_(o)3A6Gz793U~N
zWJwFFAiIZP3C;qlgiu7VZwj%$T}1s>5DFZ4KC}d|U^4*m*BhfRY8%Is(+ood356-J
z9V|T`T!xl$r4J|=SD|_Z5`G#)=xqZ(6j7RhMF86X%faV6=<9C=VRDd6Yo?Dt8jUf`
z5fux9`VTR+@AW&7UvM`FBT|%+gNe;09z+EbzN@|Yb{KHK3&l4VYOfbIPVZgu0)@q^
zr&F`i>GD<b>CNo7k{>fX9i1fR#PIY+`E>feur@*&p*JstHh@swC<%+kDDH+~(HgH!
z+^1>@6c@xTG^U#4aAG}-7xIei7zg)CtiSDkj_n-?!G%=MVa)*w>0cN_B&MU-C6DZc
z5kk6zt#Or?eJ%yFz_(E7ZVo@sISXQ4aL8p1xRLWV#e6uB*mPYS5nC5&yg7<!+>*UM
zi%})hCKy4e&WS+hFfQG9bPHNXsQB4M^+>2X#?=c-T+D#MiGiRX>3WR<f%e8->_p7F
zqS0>=fV}c90&<I}H2|6hw>dyJM{&JUOL(sg(A`Q^;?f9%(3oo=tlQ-!>}kS>08*WV
zHNgEykPTKNU?PhCed?2v1KiJbq(Hx*5$cQUBJz;Ewb!N;oRH1$v<W_~FwS4_Aq5Ws
zzhji^qYTl5UjWd1fS3UWw=D7hzY58HMlZMv#yNh10&an-`7g2AKJGSArv^Zk0#<y<
z2T&#;9~R;OY5gcW5UPT(LM0YPU15dI3mhej1&&qhyZsdL+NL;_UA=<|kX5@{K4(|Q
zsC<E4UE@Lk`@G!(-$D_rIhr2%jf4=_1-^MI02CW=G!97HF@(QLtW!YBrZ~3NMDKv&
zV{80CP(lH^sK!9a7McTM{y}~;?HS|_wuK0^2iby;N8?ZSkG`g2g~MR%;NW}Y-yzr;
z{Cu`p7xfWeRu*4g89iRud%4*Es<MAF{m22gUOn1;@%>JGOba~FVud)NCxhjan>#1V
z7abhy)%Od(u6_HD24C@%qG-XW)nSSkK<2C_^^an>6`XuMTxABL7;*{#6H^VQVadJq
zhiBv0pN-#oHeP-<u^rf!xEW8Ep5A-9{Pf;07k;_=bR`@l+o0JRp6Uyf_3Q+U4?2hc
z0~;zZEy=994=;S+1Ps*V?M##QH_0Q1V%HH+x%$JF&cW6GO(X~|jxD)XpPHK99f)Nm
z$fM@{!&@d0C5K{nnXH@)Yz7=wvKlB)o0Q)Q9c2{v3mIh!xv@Wf^W*wohJFmBm{7FO
zKYBlWpOBggZDT>3V8|>*&hMA~r_296`AZ;kN=hX@g<=%Uyqr-iHLy+$vRPrrMpI6s
zjv{6tm`JYg&sTq5{CW20#c<GZ6kjL|BjV{Yq71h#a+L37<v_fri;SY5ZvV9SQ|YI`
zqf;Y{BsKZgA3l^#IS}Vj$*up$C}>(CxS18cKMH`(QoYGQP?9u!HnjF^==!svTljb8
z+0d0|L)XaXaEz2CO~O!568gWs7g(5@1<#f|y(}`>tjE`Kfj2KF3yg<c#<xB`3f4&G
zu&evpa4k~#Ys6y#UNAFw-fEp}5Rmzp3C0CACHk(lY=osu)1&X(ByIl$s>XpYHaOAy
zG7Zebq2LOA4>b(*{h;sh><>2{k4bqwX<Bj*2|$AZ-|#uZBFwO6h7N7|9uI|L#hy_I
z5_f^%FpTJ(`}Tv&_Rglg6D)h4iS!UOvuq*M0`HLdC7#Y<AnllGM0YYw<BwU{Qne_{
zFlqaVY(;C@jkhKoCm-SOS9)X9z~X!w4vzA-9ZBF7SZ8g4$8`mQ{G);YB*9Q%+b+^|
z?WSFOVDC=YwOPA%#jb6U4}nJHYX4cuDS`e=Art+U6iA1^j6A*jUn@@oCssEZg&#co
zenB$HpC~x}!rsYzAb%W(=Sf&r{5r59nJ^!l>X(I<(EXpg;3W;>ATmO=EJe@sl=Ok3
zS)ZPlJ^|8uL9pIyDp6p)VlPkf`ePEomk_IE`0N7wAc~oL_tC+T;*fm+Lc9YTW8l<(
zxBYp%aShR5<he^I60q_duH*`F?+p0eAF4o^Kpt2fAA4^%6r#rL6+G*Y*)ghz_cW>V
zaFGlr#L{AR2@bLE@bE<<Q)#11@Spvo5|Gsaiod#UwZswIJNO;ISV8F2zZ0&=Ua{{?
zI<m?1?XW=(9Na-S15}p<;YNOmJhg}>{u}cefp_aW{O1D|x)w2%k>QKtC57Hz?X7Zi
zT#(lF4Y+6<plGYOaa0gU^PL5Clcm$);_2>nK~T&;>D|(^Q7YcOfanv&!0sTnRj$Lz
zYCa5#)z>|68VI6Kb%TqcOBICY?@7F*tcq1+WTZX7ueaiiFTG)J2>(><jY$xp#xg8K
zYFugRRfZFZA~pWqaM9@sqBpmxI<*9v(LfSfLu6$iw{o>=QE=rm$F+6V)-Pm<Ql7<L
zTbY;p9`EvS`EW^hmqOjwuOMi$$ciXH^0uFRenkGgEFBH`sw?)MRO22y_<#_fB)uMh
zqDgXh+#apE+e*O&sg)N=ekfDDXAV+qi6amGVbp*ata{LfWiER>CGm=!dZ0oy<KK9X
zwp-V~#JsA!2xaf~aqZEIhXXHab1x^x*%{M?`nWRjYH1`+PTqI<pp?Yz@4J7TM0_HK
zTCaHS9R%XUjoRRl6_;-SzLA)$mw;^QW(Vm&`Krsx#n9u;t9lITSs)h{Hqtdq?u>Dc
zu4HgNE2~Y|978$)$zXB9S8>kOl$sT%)XeH>L$_orhNY>eV#q;h9m&pEf=uxkdo_zZ
zjk{wgAgS?YMBbDW3Grp(%L2Ix;@ql=Mb<%8jv+*00AY__h;iQwX6NTQoh;!!s+EXH
z+6-i<DyyTD$mW7-C?>chogk+|{-%{&humpIym0LCKo&}3LOE7D5{Z07ChBI^RCOng
zeIgr?a=6Y+*GxUD_>@!x`ynA(7wIRL&nj`Ovl$!&qGpDgObhWzyQZKQFK8n|PVO2B
zQj<G|B81|ukszJ8W2jhNU?yml${VU?B|GwRL@s8>GOZQzY0(?N)vjZjbtt0aGLx4h
zFDs5wR!QcBX8;us7B@1KyiR5?l|Jvt$!azty|lLz6T~LSs+Z1*u6fa!#vK!Q>gLH$
zq9fF?>i`)#GV{fIh|X~C+GO=aWQWAHit1-O7MiSy-hIu=C&dpfuWBjLJ>|2#Ir4ZG
zoe23{tfkqAaaKkuqYAGJhL#p_<Xk(mSBn_gI5uV?8|z{?(tDzIIoYttG8E_hOkLQ*
zG?PL^2CikC=RWbGK_y+>%2INcobNCSnx*DM_i82b$wE%_F>WPH(RpJja!&NYALD!q
z8u5WdZOqO%qqtkmk%w=WtR<p`k{(m?c<bpBzS})%McAZE6@J+F=tffXYJ8P4V+^FP
z`UDMn3%L$Rg+U9^Lj+sqQvfTZp<1GTFZ2*+s6-x<E*FZN$VYXQJw}D3YUY#dEQO^t
z<U(4Jg{?Y`tQGZ2x>gA#kFix`uhL`cL?0c7qG<S4v=6^R5(rMJWFgh84)UCim?rRa
zrkn480;I4VnJ`f&v}=`CG}YAAoE%lO*khWH=qkD<>N^P$f5J7)$en73JLQD!A$YL#
zoEp(7@fjxY_3GG$@lKeStCsNf^+zfod_{&NyJqe71z*mlD>J8>XVa4<8{{FDJe$r6
zWEzWip%uQ$TjHZbPxnL*oL#$I1~iq(x*So>%wrjA7;5CnW|Dx?QVik2sEJ<|J(>JM
zkC<<Et@5()VwpD-a_fXI;%B{|z4s^KeNl<<ZarJbDW7K);isFSB#C#|ku^KXGtLlR
zr<r*npEl&A(m_S>+Uz8HyyfYFgcoGoccis~B|Z`zYtxkT5p%eW*5tMe4@xr~8TIlQ
z>B3{u{IVdMT`4o(0|T_aMQyBHKGrH!NTy;PW1*lXh27Bj^QK2nMpeLbjlTN*zeLaf
zxMrSkK+dYN88!9Rw1)(V1Jeo_uWj@}?k!|K+vuTNG1QdsMfZ1LlC4=iA-vG`HA@LN
zTO!oJ0nh?#rrK%9hN?7ZkAHx_6DKQ60ypUsw^{2qYH5uc6=0cR)mmJ3XN=JZEms*2
z6cAGhiIU^nqXYi_$$$F|yWq+&A@y;nmZGU$dbidq_2nPmu(ZP!DdRouuu83e_cT5%
zT4tx(nN^`eAp<Z$NW~Rxt0ZLL;-URygRd_ZL$O^PmcJ}e`ksXMAOdR$X`xgN-|ei~
ztAqCH3{v$JDOV1a@)$$T9c0u&dK`PL-(I~IUMp3@H|^-ln=|;99iwY^=o{8KwapkM
zxCyH`f-whZFub<uJIPyNbx1GvzB##k<D`F?sXh!%!-t{O@baQvzfQvxe!Yh;UEXsD
zO{iqvIwv%#GD(L>O6YH{*WS!tKOMd+Q4NFFwaM2D>#s`@G|^Y>?F-1wF@VwnC@|pi
zgpA@Vms?FaKep`cajY?-OeA+1XGmnZbhxd%a6=j4W~)03{~g%H`SALj8q(HNo{SlL
zy@>w-*~JzoyIA+|WRSo8ab(p;kf~w-St^ifzFefT>XeFp47n<*__`!;SJ>rIyBt`E
z0q;oiiJMWSzO<gy^dD{{)F@Rxo(b=twn;gdKo#F~y!xc}<Sx=b>>mw2*?zLenb^00
z@g3i$tC3Ip^iz1z3NnW~1PGNPCl;qv2NnA2VG)TEwy}keks-XerN$~-?hf)73^#$4
zyGkxU<KJHAuN_hXf=CMOSC6LQDv-bKRp6A`51v%XZ&2rdKrYCbOLRQ|toTRMv~mUc
z0j8srnv6U=uI!RU<YoEd@!sX*y}(6~m+#-GQKRFrN5>C|(OS#u68{6%c@CpKpPbKU
z*-I3r)LBromZWPE@mIQ4y<kerZ!pik6lE12$BcA&9iOLFE>hO-h^x^m4$2L=M9Blo
zQ0T$hRvb#f%Uth_)q&u1c@C)sTWJ3=x1kB)WNSC#460-Bv`e1TE(M%+2~V4gw$)QG
z;*}`XUx^V99b)4p!8({0YIgN5&p{J{#+Lri85Q&V|K(x+;`?&;aO3f%Os`~08R|RA
zIhE?`>B+oq6#Nb(X_o(CR!Jth^uEIl(vr;FK85^y7WGO&-lvp0d}v7T!7`Fz@2tN~
z{3u0|3n?jO96szKQG*8Rd?tONy}rsxBEMKlmMlCy1tTd;bs6}uqfAONE^!IiJ@J4I
zMNWd0nZ_44vQgG7;{WLe^>f=On8Ks6^p;LD<;nCcx}eztd`7B8nwv;=?pby7#^Flh
za3u*6FYO#!^TI1KqoBBI7u|2_lMgySj3RC25curiqZg6&@*1_MB3&kIs5x4EQ;da^
z3G7XAuFQ(PcCiUPrY|rE2?^RQ{=)j4Rdg)3=ofG0m&<**N_F@s#U|wqui$5LfI#h5
z#=yfV@7YA<shJj}nSvpTVAUCmSd_+yh0<Ose2hU^8f~IxWlJGLjE^o@Oq7HIV@qwb
zUhQONnTh5N)}9Y7LzUV<MXIY}TYEk@{(NBR`9SGp`QppXiRS}T&j%+_Iqx<QVh*i7
zAH0iwJm7t-9dGVaa?I9omKZZ8)unTyuCmlv6gP^jlS7<GvWN{gF&0A+^R0tAxx0$1
zbqB?AC9gW(Hkwk(Od7g5w3THl%eTwBDWXk3rQi$-{6zUIuaBSJ8kRu)?$+?>ch`NV
zd^&UGba-BRy*v5(+WhM)^Ab>BF3$h$LGAU;GO}GVCA`+b<XKhnRWOvScm(U*uyU1i
zOg8A*F}*ydYFFgU7MoZHc-PN#d4SswI8)|A!%kS#x{1V<GmS!aKKe`K%Zv~ydzdoA
z1LU?#5gAz}79_$_GlTrTTM}|<Y}<E+cyL|B9|j$*(sf+Cwmb@@urzJ)ziz*m_!7tp
zX|cr)u8ysh^rDCLU^Gv@wqGW>YHtj3sfKljE3@X1Y$8`Cg#DCJ^MJRlIQ*EaJTtIB
z755r51C6AaEZac2JR>F?+Ar1w_RLkgd|;Pv(6UU4>Pvu<9%|@KP)1FvvxmHyJPSd-
zU%_9taMP4q(}^6NF0E#)9EWgWXw#&Jl+02$E`!XQeVv6QuyO+FW)E?x^2F6mYASpq
z&$cgQ^5KjsNl8o`m`pLf9;T&s>n3;T4mW^cUCH6&_Het-$<*T>Sd0vfE5~=#Pqu$g
z+U-y=z9+kH`w2Z6f9Do$yZz+;(M9tA(9r}XGNjUc1Hk)9zP3^5>J2CqC!SC;l=+kK
zu1cM~>2y+-@`raMOHHb2pho3ByHQ^ldj@r}6)b!$v-}jwj8M+cVsnyD*tq2=>}|T_
zom2<UPX;>W@Q4O*fPUjij~!r_TV4eea%h9FsUb;rwpF?_c9pDtTo}c!KHy}bTbN)Y
zX9Mze2mEJR#al$u(Q)#VS831bqv(~qqs*WeILjzy3aw)L4pMmw*x@?<-NL`HB`BLG
zS}5Esxz{Xt=bJhXm+0H|B6l!3K(X=wd2hGr)FG2;+B0d$mo_GiD}DE3@9X3Fa`W=z
zmvh4}b}t@JTq2uJ=GR};HcoC1AAi3GmBf4B5@VWRgs+%#bn=aHYL%hiN0pQ5hsPu1
z&MryUe!u@k?E=|$GB&~X{t))wn7eGZxS1@tXHwhDu*ziTyZ<U&-p6dy@VZ%CW3ZL;
zziq=j^-iMWcki~*K{}-ymLY2$@3wOomYPcaF0Q?vSF~(Wj`(_rx3X64%xmf2My&Gj
z7{VS+GyBi~_WLI9^oW7oT8j7rmg!U&)PvpZcj?>**dUN!l;YeaS5e0UWJ~8TYk7%h
z9WV1%3n6r;ZwVd*&(;bVA?A%18LL-1mkGC^iAHLnMA`?W|M*eq&S3>Jwt^brN3tdN
zW#o)>u2bn#3|$VifV3XnOT<&B;P(j97cF91t75M%K^p8F&x<+PE-EY&e8(v2y^8S>
zO<A!-)izI5U9^kKq;sEMIg>UB>=`mi?9?nZ(z!vl=jGbY@s;sN&1uqQ1QQ(<%)89v
z=c0XYlpHWq7oo`x^5X*47zyqYQW`reuP;xYdoysKW+P6gYi};p0?h(|Z6~H5<Kem+
zkCTIJA$uRZH^j0l80c<bfLyA-KbIriABte3YQx4|N5p~j)Iza6urmr&ui7h@FfJ@&
zh(-dni*^|XPlIlHaA#T3>CGZ4Hr%34R}6RNh+FmOXcz;YJ=7bIGWIC=Onv(5|FCy%
zO>HDu0{$x*ewls1vm5)m5gonLu{%4{*PZU2jo6osGAf{qZe5Zf%o7q<Biq<x%UsMD
zGZr9Z5aJ>bHi!q=uifUMx9xc<!p6AU5d9Z6&&kSCl`d74g=y1nML4jkls9E%W}S1s
z%yTe!bzRvU9x;5}Ws<rf$J>{U;JJXZ=@L4qpF_zNWKmjPz2MKb<>}sQ`ndGCH)N7Q
z|G^yZ*|r$YlLuE1v+<v{hmFX`KW$%jfs|j*{MD3iqfS1n&q5_a@a;#GV5>jIlwo{N
zq7sbDvJz}3-Ac5Bd?-`R?NYP@ZdiRB21&F-ChUQdE{F>oj0%S(q%S|9baTw#A8GVv
zL;wifFVAS;EDLvMX<a=Jf_cXF##pY(`?wOgkg>}l4ibkD44PinSTT>BXv7)@rpQ`q
z#6YCEg)uYs-T4|G>GZ3rfhTLpCSIovfJt~HzeQ_czePqx`!EM9(}*6JXSg<N4ibpP
zPGKOwIHRt_cy5fVrto&AA<?~zQ3|W03Fa{bAkA_|yziJ`)9gM^-G|{5J7ZxbD3J>~
zu70xILa6wr;{Haz;)7s{0Hzhg%bP;Fp3KOt9i}X#OXd^*@g4FpJ`Qn@nh}|N0dC&g
z)mS(zlUSflibSI9p3IPRK#3BcteT&!)dS}RS-}MQkoYre6vW#h{B1`fQMp5yQvVyx
zMWAINwpd<*AU|KnWWc&|&{!@*@Tt;O;K<WF3TaBe#Jh^GnSzA+BbhDQ%#^9hK_6i&
zH?#)VZlgU>B6Z2F#Qz)1f^BY4gav){-Xh(jKGCvM+JgiYnMorNnP8;FUH*68*i4E%
z`a=ZL^X$%49S5Q;4tNP}A%d<J&k0@|MY2$k@OKJBIc7Mx)<JfGUEiVq2}gBY*3w%H
ziR84|gmF>F6{#Mij35z-Yv~y+z0Iz-m<BM(a7iVdRVBb0KoKBybpyfzq*VYm3vf1T
zv?qnNuJXphms<u9PjMfYC2Y^^upq+z&zKyc10JU<j{7JbJ1{L;TRUXXM-*cm>hq#9
z5Sg=8>qzvQ2cl2*lUC8eh=B#-Hz8hQBLAZA0XkV719BZ-CK%2U*)!nkHSob2&%Iih
zKFs7WMdC#wX=zcdyVe)+4AY(TfRBAQFev1T4$k9O$ia1*sDxO2?nS&GK6?rPIZSkw
z3*5SHA<))?Emo^rEXLvxN6gD+Jq&@gd6V)OsBM<lqOHGJD6TetBYm(oHsodAm^h}U
z1<x2eqqEi+l|xVx{+fZRLaO#z&x!l7r3xkiAbx-AXzco%kz1rn?<7Z4_m0O?fY!}@
z!(81SP4p6`t{$hxG4qwoaUy**-}_^}mmH5S9;Y&Iu8$wxUMEL)hmS@IZ<51rl4;?v
zk@b~Sf}s|Nc<@HkcV=#Gd7>^I=hkrd6c0wZf-3ZV9kH7Myp@D!t>4jtr>d22ZJiF}
z^)ViYGEqV6*6J^t2z^8NPCpBWnbeW@7>0N(;Tkxvvv3|vw_2emJQgI|p|0O$m|bV_
zmi~5f7tGi>skSE6Y*Q-n_`NJ@B?iqfZ+6nxa^n`uQZUsy&44}IDnJ)>kvAK&RE%ni
zTiPNOOcysyi2FY7sn<a}L+Z<DxJcpGQ+26&eFF#mVhNKlQ%J^O>qS;GV0%mN*7!!Y
zrmZ!u7|}Ba_^mYG7qEn_WDT=K&z#j;DIOhjNXrz7mKj4sOxy^1Spbp`7n4ZtGh0aa
zS!_(e>-2>WAvJ;PPiYHDQfs^+Jn|(Y@uT>~G{g0#tpJw3YV6&a@ZA8bq>%-Q`G?-}
z;tKjJ355WO^hn(b1bPfhor)a2L@qp@@I4-Al}R^MCQ$lrr9ym9#bNa00kGu9EI$Kw
z3Uo6uAV!#7N+dw0Zl<c1*A<XULQ%uA5tV}MB0waow`pI<(wv}KC<*BTvPej4w16#v
z2V(;K5p6`f^a8p4I+}|%o)-5KKp*oMx-hYqG-Lg}Yk=>;6Y@i=9Zla5Q0v3GVEF}4
zSFJ=(SjIO_fpD^pXF&*$hPlppcnJe<rWxw4W06Bb52=`gYQvOwmAkWE-~l`#cxFmZ
zREODHaDN@spmkc*@rsRYg3BB$iG>2q^AMj`dGlA;&k>H^Im|p_=kQ3&mX=SDx+X<Z
ziH-?|$YtXTklh%mxBQX@UbX?wuOvy0J#&{QP4keRR~g;UTH<~}>M5PXSK@x^;C>AM
z7O;Q>59yg;#fw7wIRNo|3eeertR6PF&=G${AO7^a9iaJi0$j#uwVcz_S#XV|GiJfw
zZSyQ>;3xdHbuR2wId~VJ4Z4l?_3p&WV)pP+|6wV8aAklB{x6HyEOi;i+k?^ZmpeC^
zQ*mb6HR68KoL<oVtEDup_OQ^e*Zgqs_3)5k^nF<zscHzgAE)VFO<iUtV%R`1J7yPm
z^ey@%|3~-4H53TPqH-|gtBJX3hL<eWXEfCb*sI%8s&T99Y&Fc)WzH(_>NZ;yef^V6
zmGEF^kiPPQVhq-0eN|M|WiA`E>b3~CG}Uc+)eO~THVa5~na7!?!ac4TuT?4XNmQTh
zIg|;mx^2{nz-nc?H}2U64bRw{&2+Y;wm##Twu1S~*si>c3eYreRHl!3#uIsU+bhnn
z6+~^eV|N4v?hJlu->NF=+WmN<M}CqZp$l~O^~gab)O|^m!MglC4LPjK{O)0_>k=Ii
zY~FMpc*QIkQ6VL%<!4R^W$!*+p?8W@(0;Uc4P%8r+PlJb)q*h5hjGOp3Gv$3xtYV<
z*z1wRVPZ=V*_yiTXN0zuO5YZ_H5d7Q!rICYY$8Xm#J1UoXAaw%p2@pqw`S4xtP@<H
z?6KcYpX7RKRxQr8tmjRF>z&|QI`X~cw%&%FEx0vz+%FN?y3pyIAg=lBaueU02E^GQ
zx30C|CeA%=oNH?_XrkS>fOakKu_n!JKyw>dhZ<sCc2N6QM0UaU^&}ygZC`SG=nR2g
zPlC*ufW4kt?g8>zR)SwQ+I3;7YzTWv6ZM`N>ZRYiG4h@U{x#cze<lKKnUTL(3Jj&l
zsto}C5)+*0-0dV7_YA`@eios?E?Q6vfL&y&V!sjR997uYMIF_^7qTe6kV7WRsoNe|
zy`V&U;B6_uh_P)k`+97T-WIugx&r<1mQLIjxtI<*fCwjc(bHx`>>{}n9nPZlAdj2B
zQ>d_yX{-0?XSYQ7sDJ9sYX51$;3DQ=N?*bF?1lT9o(cTja(Xh2PuUhL%EF?38BV;n
zh_}HblL+9Y6S&*qsn;T1R}-=k&vh}*+@oAKg*i~%BKIGoJCZpGu${rX=#E8*%D$Sq
zdN49`Fp&{NvM;wELe|=Rrw3$NkH5WBv~}Kd?<bXI9oK}39)WDL`_2@LWsl?CVpx4h
zydwbXms~Sg;m$gB_Nq&|Cp<=L5qF#H^<<}KEpq;Q%wOT$Y(#WIIGmd;;5E93{>9<f
zhDuZO+^jfwWeVh(I4)aX+iddH)8(smH1j4?eJjUhEtZNVS8c#mD^{2$NaY}v*_hrn
zJq176rcIIDhV@HBr{<G^!g__LbFz$27Wg^Y)UTVQR+|f((DYrOj}@K;t^uiGmi?LN
zsA0}MV`P-wA&!obTLWR`qj2~Sfw2Fd-j<K!?vUC^7j^h)Ho*12E2&OGUFTO!*638l
zkjSlwA_UfLlWH-n+niM_HmuSYV^^@eL}=!q(kgTs*KM8EY}Rc)D`?eoz<W5VN?nrT
z_s(1k+z6F2VuhhyXhX}|KQH$Pw!o$O6i+#AgrpH6vTi%ejAh;CadC3D#N$9Mb3a|1
zsGhSZRQA}=3~BFNIgT$LjiiqUu7HQCf$`G<wgr<4ZKSPHf(;GWGtTw=eC@m^gDR5O
zbu0LOnyW0EsAQ#?K6WDH%fj!ZbZvp_2}1ZcN(Fd{sG$p?)*kz<615nZisoF@mdRJD
zbV2fWggh}}w}V{7;c^yZ0`Hn6!TY+@ncF=PFg5~?q)?P%0@^et)WevZ1R>vp4Po?p
z2o(z^4($y>Xb&;J%$u_>ccu;>Oo;krZj|rzh_dP7zcsAv^df&h5woc$o1oMqXKwc4
z*@MkrR7<w+mOGn=$QeKVYijD5!p}AG{|QO7of6Ol(<cMdcGB^C%&EOi9eYsSw~SbM
z83FX5H4^bJjaxe_ludx`0btiC*JlD|d(p&6(@pfx$GsgE=_UYw8~&o*nRJ?Lya5}p
zZ?0<kkw!=k7if)BbZeY^@$4Km_9hFRIbQDVXLy3<cGkyVH$ZnbpEgG7O_u)lSvrQD
zMvL=apVi`g?NoeDa|83w<#U?X*JSFUU?k+1&nt9MSA%lBxWVu}`GfvF`6?9hN1SPT
z3eBLC+WZx5Zk3xwb^9gS;B^D!G&4)a!Dh(~mM*|Jub`;SC+k%(S6u<UzKdT^;nz7A
z2)!rd4|SCnNU%H;iK?NCG6}08FjAJ6BGFiwbQ0caF(;<Zd+$8<9%(CYt7u*PnDwp`
zTmKJPRiyxilmKHJ6*1UjlfYKS%l@qsY5zPHRcPhB(U8~AOR{YOv#q6gNVFoFuxOu~
zqNIVP4Xs)%Vj8oJ&Eu`4Gj30Db5)XAg@(8?l1Cp%LoB?_EwEdqx0%Y|1lYffs<8m=
zFva|P{qBA*nrjkkXj|FQ4J*2-Wfw26f>i}<F8%CVX9S<(MuH+bPg^xi2MMr6fGuPS
zEeos63X#PedED|prme1NYjH;ozcYR`c#9nUwRqg0f78E7-i-GDSXgw1@F}JZbA>ty
z$|~ru8PrbKP&-|#DBh@P8)|<B71<TDMxWL7VelxH1oe1*0`E*1TCg*WcQ+uIS#h4-
zawhUr|4E|eRm2ap+{Gay!VhXh8PwROnWjY*qj^kTt@Sy>`IN4RfM&63C??aWb!0#r
z0tIFoHwMEOb53mjGfC~Cx<rpcCwwRpeLO*R1}cE_nIB^c1|8Ol3s(Q{xR|iD#I$@c
zpP-I&!vyO=B1f+zdr2x{Qz4<3$`k9lI6`HYaflm?0{sq1@lS2%ER&}V0a@QtI)vgY
z2&61DOP-6Vy9U*FhW05g*A>yJ%_<Wnhjd|0mpf^SL1T+%5yifFlpA%%?+EX!`{XvE
zvX`X;&Z87o0`ZPYQ`=9AF3|m}Yx4)g8waELmk<B)I)3YL=LQvB4hA+|(EFFgN056o
zf4H&w!{i<M_x0v-o!n-ThQIBNHB)MBl_L=nh1XUK3AIJ#AfXbBk6gG!Bws)YQVBx^
zxg;V?=PeQfgXlSYH***XbbQD`bXerzaz&fl;&eY>N2W02_7?U%R;8DCpFvI}sc^l4
zUl$p5H>25-0rag^1E5U{v-v79#*AbgJ7TVGM?f35I2_hXW4|?rOqkK;-l{H`t8~GD
z2`0~Au3c>6u4%u-RbKiu=A1KXGIi5jRX4GQdFiHl0$;PI>T`B6vGrfb<Es+DvaU<9
zC#CXImlUKKBVn~X)77ew$fZcMyby>9g7weaqNI!7{Z{}nK+eDO`asu>E;#WK^vO|~
zn8jfF&Hu#ORi&+sNU=8Jqpv07ozz9&c)E-%2+l1cvrjezW|W?BRSOZxsHla2Y^?N1
z^z}-@A=R|D`Doik7B`cm?fVU_sGKp0+`91DLL#A97sQ>%Y9r+Z6zh?tn^sjhCbX=I
zi|JXsn`iu4%WhOl?k$XxmoQDj_{{EZX%Bkw!!%aPNP8Mn+@vYy@OqBtF0o8mcA;gL
zvBp)e(A%AWm_<Gi*V6RgWVHs-VrCj8geiUITN`+n%9yDN$e!b8u}dtMIX&~*$|7@$
zur!5AKL17Es}aR7<4|SqTo|4z%$2aL62CEkVd%ifk?Ct#(!aI5h=m3#Xi6{_sWMur
z*4IE~q6-5ifQl75Puu-8Q@~WUB8bEfb2Eq62VeIN0Nzdvzlh&|5uf|v-lM~<IKA_7
zXZP?{=0!XOusV@+rI025h8+D7GI2vW6C>LN4tfhUj5)=rHPBrKz)PGIXH3~jsI~3V
z6pNs|LAq7_F2~6yR|o~6@{jAiZ{`+{?p_B5O#eH&zfF!3i${|S$Eooj*V2Ts;ucb)
zNFLoT9Q7BEM$)=U`R1l4XVW!GCdZ?1PqCy^dVPSotOn=SOpArTMUz|WJi^?Ssu~%l
z&6*0|txJZr^@1>_b-P&U3kk!r8`TPK3%jw!Zr5L_@I#_{o#HIG#1A&C@?Ai1EA54#
zis;e<9DR;yi#d)+EA+FWp50<#*gT}tbM=|;ZeaEz63q#`nKF~n63a?&E%JI70iiDS
zF%G(gjUPG*2X(FLXMrvPDa|uDI;|}vXqMkrNN5%_En<p!M56ks;5o>b8N{zk1~N@+
z3qzP$55GIdu00`4_XdU*(n?c;%ouyJeW_c5omFa&++%$SmDjq3F=drqLSk>D^1V9J
zQ@gB6{s4aq`XsB!CMvNBm^9Y)J^4DFd01oQ*z0Ry&Rdgg2R}g{uDg1%n>1tL={6Sr
zG*3(0ep&>lu2^R8(>yJy#Ek<2KOoX|?t?$eYG?FPJC&(7R8pRec7_5G*n(N<y*9Nx
z8^p&b(@qScHcv(2U{^UEq+LjpnjRr7#lsom#^xJDg^ZJ~(Juz@3q~zhIXA6m-Dxoh
zCF75bsHQ4O;8HJPfRj=KQcg3jnl(cC;%9a!#Ar{}dOu}AR3w}W!g#a3xoH_8VMB|P
z0UBApL53x(Oi?)`i(rkS-I7lu3Z2AzV>A`PG`psg$Xi<eKF_|34J+ZdPzb@SL$nye
z*<}{JKqooSOhAhKB6@*lXjDYxK%3aK)!qg3=Mex-(=^)a!^;|D8QkCGAd^kY+bskR
zz~F486KfTyKhBpTy9Iuc833i%qxuBHa&Mn-f;BY8z0U3aFqUIRwV$@H{LnjjP%Iwa
zT49py!HscUi1olN>znVE#t%ks8}+>`_P!oog!<MV9Ts@4H)md5?scfxC9{mGG4IGS
zgjGuP^+FQ{J0rEA(mq$49Bu6gNQz%LV>B$9MNuOWMKqJ&Lq&mDG%A{>yCbC3={+&b
ztJaB4tlFCTrM7b)d`#aRwrK+AKG+|=v!6`uk7V|Tll#dLvVVDQ|MHq`E-FJln}6Q_
zy#HC|`3+~7yu?nS=;xyx05SB~^Py*J&$54CwoNF*3Z<_SeYh$-v`ybm)8D0Q5!*ar
zOEb7_7i^Ud2@UI}b6VOLf0TRLRmm1!(>L*Fu~vH&1g66BWY`}p?~tDIOs8M+*(Seq
z?t_n1N%7gH^3j@quf~Ete|fIG9V~s0N6j5ZmrNh!H}}3B_%{Ac?wg%&R=!QxrVjq^
z?|*mBaae<O@U!y6R=*swO$MfsSS(;m)g>6m&kE1`ZBr@mOCz1K5`p~{_K@DG_3eIF
zt!K8t93F+Sor)dwF?>Ju{lxb-zQ5%L)PoLdJ4!oLH>0$Jw(nD^-8Kmwbl>%U_u#wr
z|Fi90MIZ3?9isSf7l!SuQ2e2DAN(%XF4^vHQRw*jq&(9ekkn54P}Mg3GU+h-LREsT
zN?3BY>mczrThvce?f2c-?;GFmySLvrw%?bcpI!L?V3wt#Zgs+Q8cv%Ej#GuBr0pI8
zKh-_HkOERPLSI6UZw73iI(?bD97Tan%VN7@4@<d?2`o1&?)Mltw}L+Euc)qRJ05};
z==8n_I+krJ*=3zUzw^2n47sW@0>e4fF4?ar%%EeKRC3x<LR-A;yvZTQ3>Z#(7yjhy
zk)PO3;BeH{lBSypT%=S(`ifw?FALRvm+$ikjN6SBZF2;SlN%4U%>@0dZDh2~ztBtD
ztaK#*QSzy_EspqM(R51mHGQ1r2WqrKvY%!V6`cw6KHacS^&uR}1SjO`lafQqPo$`u
zgk8D>;Jnj=>gY+P)H`qB7(it-%{AL4)MKfVYAX#yL0p^Bb;ANx`0TfzbkK^n;#!gZ
zo6w36wBoE*T%nh?nP@+khvZI0^~s%%8$?_VwHPh*`M|T4zb`zqom*WByoL(4FCVVA
z-YXvL_PN+H;vylPG@p0IE-9TIR5xk3kL1gM<oC;wC<%ums5e@CJP2<U$?&Zf2?Xp^
z1<2JqX)nI}!ezn65q?h_7f!Ph5ad6TF9zh`lbP}o351@Mg5??Ggo)^x_>cWEWDS%z
z+Wksv2kZ{>^w>bg{-nwkdZ0WLAz}IJm|R`~i!F`|L9qUXKL%w)m8f+@8RJYCmuHYh
zXZXQw`Zqz$cL^RBPa(`+u5%Dvm|U-F>2V~*X)T>IPQ5LePPwHU9^e+F%AbJyf}lbt
zFzX)0`#c0O_ERWJBG$9gM#+Ng_tjMIL4NdyiR*YMy80ZmD6<{l$BUPF%;7ATap&26
zhm*siFg4%%W@Pp_HARjJQ%8eW^n<q^yg`?=^?5!1E)Yw4Nt?PpY66r6#E*1Tu21Sr
z6wk`KJ_?Hogqh_A-^#FzBUL|XYIa9oVk31Gkz|RTD0Mg^>IE~hgOqWd>Vbrsra~%1
zhdJ(gxTF)l#pOB&C*gxQ*c)f$PFqNxH!h4FyGvT8V0Ylq1Ax022rBZ$NRI@I__kl^
zt}LGyCGkUlpUT0IYCX*dQ?<O*CHn=@*c5|QAVTR;2+{fu$MLr5-wil0v^QFN>@F`x
zus+4c($~bsMZeN6Syn1h5S8PAXp$dc(+{levaQ7Q_8>x`)do<ABc$a7hf2$J&&r8R
zEGEoypraoz%Yh_|=g9_8#N9>evTHtyR{unsD2th5B*gj@+4F<*ldFIv>mLsr^*~B+
zbw3zge%-gj4s&@ry2VPln`Ec5&ddQUDdtmL8u3lJE=Um>c2vp%FIhXpGby2nPwoN<
zT|948ZHt@_tD$z6)W(z-Py_CN6wj%K=o%=VEvC9eM5au>j#fYH4i(NQ|8rZ!TgSA)
z20w2DpiNQbZn_xygmbpTX+hy!9id3n)4-u3Vw6o#@%~Y0x5lC#>I|#i#v5&=L{|YX
z89i30e*ORKT}yM@R+j!Nx3ZZ<H>*0%%h{yLHBM5q=;>7VWEQ<ifh0sh1OWyBDN8H8
zEn9Nr$fgxPVuhA0$qX&)Ey@phA$u!FJ>x{4Y^ZVMM{NIxx#wJ50DK6<1(cqysi{h2
z5Wu;(c-+T1_dDMgr^B(95WmTET>Raq<r8uuX0e1#L0kG*#Es}?EEyLz(`XAeL%aq(
zD{H3N9%C&zyggU$6*SLEh9!%?m8@i%ogvKBt_*D+;^RP8r>=Zg_+FW|%0$+TX(?e#
zL)R_r^u#T*;(eq+`&LyKucZm-gxrZ3drmP+GE#Dw_iD;kE+8DhYt=3#_0CwhS7YXq
zy0{SzLE`sEAB?z7P7CoQ;bFw^r3jnml}rp{9+LQ!;6&Jgy(<^+YqjU5bOxy;K79AY
zi5MN%Go83TBWA9eQdrw~TWHgyAtiEF3|M27RLr0=@tkN8O5Vzcy}Z(?zWs+kM6-@&
z{$3R8ccNGyiB|O^e|RT~`8#jFAIbc~cca|<yC1$E#r(Z!kM6y<--}}YezdEDdFn{$
z7VDG{k5odWC-A|MNFNV15B>Jjutctk%zzzJ>Nd%SC7CfZdG!6G?}x2{x0nx(eh|5s
zoEbm*&e5Y0%Av!5oA|?k2~x;wW)$kzC-kKJMZyX=C_A?QDqA4+@<&O7<B^PGmMn=5
z)Ac3f0zyQc0<zSPZc_qIV;rk5)guuGI%<6+lJz^`CniP`kO@IdTy$&gtRf|AvY`Zw
zSTq#UreuX-#vap{B%7Kr4A?F}V;_sCHFrYOa#unUh6dL;FPWhIoLhnwr(Rk#aB)(O
z=Ta#JJj7ar@o|12C~H&^fxav&exgP`!H>_OH4?Y6Tu8c~W@G_P^6tesqny?L-~Lj#
z=3kTCuS&@Wm$j{%t?;(a`-ruCAI);;lX5m^u+{v4TvK4QbtA5)=!euAu3AJY_Hwl}
zfvM$vbVHm=s)#F(Wn{&@kE`etLflaj;6zQEJxcf7H(Q7DM|xr|HKb%KD#2FK67bo7
z5+$npkr+HMozx5_4X?x6HKZf2+N|4<9lRNuLF(1{^RJc$e%%-hOdVCMuJ%+{%A5n4
zCCy%B*}5#7meDRxh2enmyhz~twO^zl=hHOj_wB{JRG|Ew!|c*B&kmi6%=T179@#Q3
z5l<lh?E>8h$L<8*jNU0}iZ8&gdB!CBOmVHZCGN3UpV_!cH|}6E#BG19EiBc3BCF=s
zT3Vw$iJ51Khk>}A?jtekFs`uP=lOp2nrj4try??QmT@KZj3<3<!Y_rcC0=g`63-xQ
zY^VQ+yRZ}rG@MQf+d8DF!K0%)*60~axNLfFXDk37@5Ms@i(7@jL{2X+PQM&&#oS~w
zs^|uBMq`pu%(&bx<zZXOX6Xz8J28_;pxdEHDNQ%w811mAVFuPEU^t1Zaz>oasf^>^
z8V+g~aVWdMh#AMYK+Te^TmwM2OH6L*fbpJjF0tKZHgXOjvr{jzrEhGFq+5JE<(fLc
z!ZYoZfKvn;V|UBh%L|yHO`q}g*riQXXD59t!S3;<P4ojMb1kMOAux#tK1eU$P~#I_
zJLriby-<T_QFZ1VKMFhMn>@mqrjKyOd4x6<5@S57dx8gD^uQ-rKLH8{wfz5QC;aRV
zo=p->W>iqxgUclT;4&~oCFp<cv@=nav_!Xw#QPCTN#{sbx3+G=X~ZP?t$S%*lQV({
zR4+3W9oE>4709FLnQd~rDjV$*vd|sVfJ`c<!NDBu&$@F4InL@L<Jq6_Uai~i(}cyB
z8fjNOGhpN>z&<qrKuD8(nUjghgpqW^k~ET$RMO@2vp7WCHDTj5$R{aSyodt2XLg;{
z={>Xgbq+~_%(`yAO9F~1s7Wp_DJHKQJPL~dy$^qPUvhh_;MQD*F%fwN24P_B1<v$T
z_9`(=0Ywxfr4=}v)G`Cv7Hpy@#MyGd?E<E-F}<W-hRkj5BMdr{N)DyfO<PzA4)i!$
zOBH?rV5#z!JBQ^mx~e&1Gg@I42?qu<@u)cmBwXW|2;+du2m-drSq5=F+gr;fHG)&$
z5vI3vo(U>B$7eVabBzl-fl@|=LpC>E&MKzAG2>t~?cGaE_QUawz%dHVS_Rz(Ya535
z6SP}}MlI=PTTV-0)B$)q61ri?3HB+<zbCBcalXd{EyryuiUtH&thorbmZnY_vR$!S
z0*F~5(9UU{Y_2V*PXl2$h1WwtDJZC?Y+B4t6!Ls?>S^!9)84WDk-{(cuHbeFG?@CX
zff`hQi3>Gx^{2k^8uyT%-_whdPKBp^H_%nQYU8GR=l2U^FN>qpWPf<*+10aL|7n2h
zLd}Mgd79?JM<p+-)hd^Gnnw4t;;>pdT1cx1#RkWd_<5_S&a~lBV>Jp$F^dIW<_0tJ
z;Jlac1o~YA0Nl}CTELUg@2wSQs;faw<qw5s>-E)d$g9#F>Ox<9ef4Wwf9dswl{e={
zfqv_557Tb~;U>r5{&o2-c|EZB>hyK;s$6;f-4*hBcHq^>-0N=#$m=Tu05fMExiIZk
zqwtZ-=j>0XWkb%Lle}6_j#RU)lSG~VHBOUN@LD&-2QV_gVC=VS)O!uL)^t<m>DI(w
z!tnM%PY(+{ChZ2#@ZOd6j0Hz233pad9nbAtn@1;B35U(Eb0MTnM@DbWE*;8UXDUc6
z&7+KeXB}CiK`j0f$RpWh7q}?Xw1dNUqaebl`U^IrR=Gr~<)Lc%k`o9i@L0)pCrIL<
z+qyZ4d|4Ar&L~)DWek7!wNgqd&wDAs=4}u^oUzHSR>q>4T+=dAi>-UATvJ&vp~rh$
z1a?B?1WW+Ne{R}C!3D7YbL=-u0O&!YE@16Ci;P}uSHaB;@Q`|e;?RM?qd|w^ixGh>
zAk|j^sle7)Js2BU1ygYsapOK-eYO4~rRt3<ytY76MvaGE3bJTyVcwV(i9hV42Q*MQ
zk`nEDXRhPHV4{n945sqnB6k})6r$KeY`1M^kFi_Jour#|b|s|_O@DYfp*$EA&wTJk
z#XOUC!=9~MN?tc{Zq8@w!1*d4H21)`nki?pveqs#@dyxQvsGE>4gle=VvfZI;rl^;
z1rYdm`OnG^>zH-10%9=qfDPhj@-R`@!rI(u9<YD`28upFV(J+Zmn}LYGL;mZB&97S
zW&NJ$^nJQsA2WeDk(QL~=O#=XHjmlBm$(GdjYiV0S}}Kz&Qa^I;yk8ooZ*QZuBs7B
z0a-jC1!T#aI^s}@E%A+IyEgVuyVlJ^m|>Dc7Pz+1CKn@G@W>=hdnmeq^^4_?%wcrX
zdy*USB$#W$#gj1Oq{JD+*oh^!7p0l(bdy`mkclR(;jSev$~46|$65qm3)RKbc-ewe
zdXr%6p%IJiT%dcF-F9KOGkWq}Y-zVKvt@H<g6&+w=6QkXDUo`9e!^7B>RRJE*%?Oi
zu@x+hn(v$srHX`6;1|WwUn;lv@Ad(4*xbne?Xh3(T;Cr%{o>Z?XBTch>zRGgfA8hQ
z{L5?KfDjSJfWxHHJU>78%jVGYzT5kg>v+jC)Ej|IUK)6I@)SJ^*S<T(Le0NaX6TvE
z3d4}SgMP)!o+WDaV(#{{3+t`x@%PThNrk|iN5aNSmLY|}@T*GPkYUJ)Aw82QZa3UT
z8Qj;BvN0p4SxV5H4i{@0r}dSRNnML264`(N=VsQ~E~_0@P{r*7KP|^BVFb>Ys<nxw
z04N`$Qv-N}p-RBl6tV<aZ4*F(9}>>Z{wA|(E~RKOAlwrLEU5yYri%-cGp0qSV4{#J
z^NUQ-3@XRSekVau?~YwwQd5Q!^fkJm8vJZKq`U;MCQVMzrYKqj?bKrQf05i2k1=F@
zi6j+x2nuOuL1%ME-~XL37Rl*pL{jUT*}GX}QGrQj+S0QzAAk~`KwO{_ERBBVSq}{i
zT!M8Zw$xKVLXT3D1*Pzdn6_95TlSf)&i1gyqNu2-Ae9za>pHlW^{OfXp;Z5n_48RV
z%hG;E&(p1T9S)e*!;5kfm&r_3PU`usBAmsR##)L7x@aw2;PQNxu{!+}`!jBZ=Q>`y
zdWADLkZsUO7iq7<dU+P7LSVgMamkA+Najd9XULN8J#!Ektg^6mngTAmj8sdd5Gy}n
zpkol4t8m5i$?Ek})#(!P!Ib$f$wb1OvF;CFj=Q*mb9?VfNMJl#?ZO3yJNR0N467n7
zzaI`(>)QZ@4d7|%heeCt`PwClx#sH=!I5nf7<(^ZIDd!*@(+InAqq(D%);x_({Fmq
zZ~DJ(9>EWcowGSx3S66Fgh#qdEKYt9x-M8~-m~q5)aUC0>xY2RYu#d6hsOmmeMJoa
zGM1MD#-2+}!qJBpjgAHl9LB(Gfpf%^4k_jv0a-{CqQ_Mw(TzVm(N^;CvY{vSPQ^@<
zlp(dzQ?#x`djKTjaTj_Bofn^0$)paVc-#r4MnotQ9@bfLG~+!H9N$nW!SD`hThl|!
zTtRyi3is8}g}~`w(uR_c>qgS_L`}^Y#zdVcs0bvI=_Smg#QJ3Q)^O{HrlS=fA*dMv
zX*tAWgG{M?(g{$ujs>=eKlX*kYKp}J?XVtxH>*D^?OdS$;{N)$-h@X79fO87O6U_o
zs27W)FTcHJ13t$`U)(?YqBt5D`N^!Gk1y<B>t~_47sDs*xST&6^wZD(g^707K|&i~
zpy41s%;^zFcCuYd)!AWjZ^(BN5BGcMNX!VJK|9~0m2VR4o(=~a`G4YW!1wwTho*Fc
z#HHhfdEtq+uO)~=q?L{NArX&d@e+t}V(`PH3=)?v<}eD>5FQoNsr2FtYZGv&%N7|w
zNK3>^0gO<s^WH!z)9b+flegy(1_L(t-TW|rtz0`7lE=QRh-kb60eEaKE9&~yVeGJm
zhk>)Bv$AZvlJIj087|cW!3!k^iMo&YamYrk=a}@s=wtxId69I2h`581ZvzM6+-ls-
zzHvI5KJ6WP+B^BQua_RhDu-5n{0(qw6Ao++Kx+6M91hOx&G~^>#VfzAS6*G6YwCx-
zxp4h;f3F8Z1{defsThjvFpy$i9@9gCh2gXOEudkoaA4QZ9PG)uZZEMxTY%xRQDfZE
z{f@fCc9#$h$JL0-YQQ-CrbsOh)CzCsu;EtC5mX|6@`0W<BA~F++?S4mg5u7yT#twH
zBA)0-ZFI|ZE_XC|lQU6h262i4TsnthX#>=KratmcFL+154_awViQ5?ojA~+9Zhh!d
zp?7xpxF(z$nrVsT0|1<AeD*L}hb?eNz6d6C!$qBVaNiq1NB9*V7Vs!Ch^Vhc7~sEq
zMr|w**87m~K=$`ao$`}Fa?8{N>%P>MAR%uI!Hy9u;nxZL#wR%$E)=t7hYs#Ef`j1V
zhX@bazkS{Y=iDCKA0K6Spy!tc@aY`_9LNj)f$}l7z|cM{;1KXye;3G4Eh@xWwD`@D
zAudKDWeM|pU|mx|nPXhLxGH`vopx<NxESVo*+e#snU&Lo8DCC7r`zn!iLLGDWmV5+
z5NJ@ps_z^Muzuf0{b9}ORMKhSQc0Ow&N5TF?5knJWe<m!_eK`>Mo;gJd{6d<zu6m}
z+8Zv>AA2KT?VY;kTLMTRPVL@$GQ2yxTOy!_xLev?e=_{!^zLG7yum3a1++Rl**Xo`
zcexz}6Nh(~cV~YdeKJ6vZ2mmDyRy6ZWH^9+ViCJP+SNh72Yqla_J|8RL$q_fio4kk
z-m&_Y2$DVDG*7!<r`<7@u<nHK`xwW3E0hC$3g2^;m2>d4YSVGODY)~vcM1by4e7Tf
z4i1<^sYZ6pMi5N0rydB^@Q+d-`W^jQ>dxyO)T@_(X5Sa{K<7LkD^3{7l(oFB=CTrN
ze*RdAf8V-Cud-Eqc+sp0+WFdbz4g&CgZ97VxMaoEu9QDo0)iyTypqNSoZp(ElfSJ>
z2jHgsAr>D)!%w8u95w5d)7^5;pQi=|DGQH!x0kmUx9@DPZr|EoBy{uMcKK1yqk%_*
z+nd|#kNOBbcAuWJ@u+|M?)Jv^?d|)Id{<X&=}MuFt2slDDH(>Ysx750k3qyJXXd4L
zh#dKLcDBmp-0l+nyZGcJ^b>^sTzoP_yRE?|C+VCHdXL?upGSSmQD{%w-XW_Q+Pmv%
zxm~fkOsk8|%q2C~QW&!aWrB|>E4TXkKrFL?B!zvudATKkf`u+uF?7j7l@&)5C^)hN
z^ffPo!Y27?`lpLOm3}Jx<hyXh;*qO%?7@v1Ok*5jp6(>o=~JkuVh0#}OEW}TIjC}e
zB6p~=*=;@in*L31t##A)<7-1(>VIzj*z>sfcnbe+KCaMzSLoInc|3(b<{y9m<G|y!
z$Jc-K1y(iXUQ$f<vZdC5m*iU?eI^@lmlA~HwUnHR(<72UZ4-!o{#nXEJF<vGK9e%>
zTpC1Dd~85I_o}hAOea5vwYBVjGHq>tvF{1;o`{)OXxEwby%&g%zPvE<{Nn9k*cns>
zy^4~c-|qE~<Hp&&{>xk^^gP!LEfW0a3f<c4zlO)A@VxW5^O|#NYpBjDMl}m4v@{nQ
zQ1k1;l~)T_USGNq06BwVqs)8HJUI8TC~34&8xOuu`+n87>S&85HCUZK=LG0|ivx%A
zECf7o22wLO!Q2wxN9Pb{4Pm5o$&2|xfX<Ld)M@dMsI+Qsy}tWsfw=(<)MLC!VSAz<
zy}vqX9F{Dc(OAa8xXaf<ZdprTbG2>?-=0&m86}yNRk@{(s$fk)Q}wu#R1D=|u~Q-z
z{Rqjw(M_3d=~>xd;+KVO%RDx_SuL-TYWWmKQdu6W?|rhU=qjdpvcP906c)%1fYDL7
z3xbyZ_@0Il^f(_uTSE|=^OOk`07|Z(YqDwqHG@VwQEE>IGu3LLY1<S*)Wm)8XF5aC
z*A*`MdZ)VHgMUv~*A}X4i=?_%sjgqdz4vhYv~Ou?cP4C@X*t+D7=&T%@mnIgp(i{1
zm0!Pn>!Uw*$n<gjwVjSKs*>^T8Br+qJGbKkPBQ}fP<G$8>Tk^TW9<scIKfE!-w&6P
z6|u=<E0q(TW*P)tb;=VG-Sk9F=c8eu1~IgGA3~&y{ndpTjB(D|%3Y8fdCM=;(YJi?
zf6r1EdJ#g3e_Mv&IH5ll=r?;0Qy76`ZULf;JH7wm+f^_<+n#D=pjw%#R@SQ<!_~^o
zYUO0LQiPwdU8+`=QR;RIrEV{w`0Wz@yo!6qQ45#u8Nu@>tCd1^V-#g_XW_IB-|0T6
z<9<r}<A;-$V$dVU03fJJ#slB*28MGdFpvOY)e1vkt=m!77A+REb9YO-8&3v*9^G9e
z|8?W%aTZyb-7WdqftKz+Z>?x4lGwX*on+g$iWad9W$#YE@AzSdYWMH<?2mmNoX-#D
zb$0`tPCKz<8OfrfS*}|le@f@*CjBq1m<KEG9wTwdR1(A!9wW53RSaE&ZDEX8eW0lc
zE8cu{cGLkAvA_gEy=E6}WRW=Xs%TT4f&CI*;UureWb<f9V<l!d-YiTdYRG*5*XjY$
zlfP~i)2fuT^G0Z&spq$Dc#}&6p-m9QAt?~)R^=4+{bk|}g5)wOIZihO;Y*)u1cq`$
z(`%y;Bu)o8B_Xo@D1*YZLj5X3=}zk;B^eSO`U&m%bF|Vj{k$zPuoKn|8-T`$&l*Aq
zum{xngkeoX7y<wc6R8UnAZW>jHf`DzXr(D(WYT=~`ly3NAdRzRTZxtH2g6)h6kene
z>hnSnNp9kCv;IU7L{w9x>+!<2;DZK7cz^Qtvn%E2mj<5pmY7q_AhDwOo^hKV3IROx
z)#W~<kGhE))1=O!Wl16>mnLdS1h$2qZDrxra)mGlm%O?7&96)Mgc+R15lurYyfO$$
zB7lQoX(QK>)M<_caV8y4@+*&XxynH^Q%%nn#?8zler}oYtdLdot~c=8E#cvlh@YFJ
zXN++;4f}c*I7Vj9d!8^JW&&X_Ecs)Mpgiy>umz#JMH(Hil1m)~86{$UGjqmHBD91S
z4jeqa)wyd3E!iZEZW<%yVH`Y+J!nc8a`Ks;G%cx9@02~M8hX7yCypukliOPD4!b5)
z%KbDU2uaaal3ju6E8Vt5f_s@$c7Qu32>19{vq!S6=~POBu`K8M2^mH0*jf|Db&NB&
zfI0|+T>7`<!C^px#wt?KEa2Wt_FqiOxP*GaD<PGE$Eynk;vsus-ore476nG|a<jql
zm|^5Et9be}{zd2p(6jW3!YIB*R}g7Y=$o=Z4lB5$vU3V{)=kF6VGXP=XEWyKU)`d;
zco_TwD}_y@*;o)2nP3^YL(&GebdWL;=IH>#jFb+c)vIXX)C1#+8I+I1D@Ciy;<9|H
zgt!JRwTUHu7m$U(YbRTld_r8<S5@MWF%v@cG{j5GN>(C-lxI4{WOBsCG38{tWW|hZ
zCKk2?Y8uZeAtZ>w(o|BS)GzTaDd<<@qI=AQo)sk`Nmx=)(MMMbQE6+VLvg^b%r(<b
zbfsj`ClQhr1{zSRO*bq<61`$7sJ^+Y)J8|&pp+jTBTEh5=3ELGXgpz_h>;MrFmv&|
zY?z8Lago^>w2iqYoc}}^VkXYD5|`p~z{l<}>J}!VNwgdEZ3XP<JZ8Z=RgaOLq1TGR
zXaDZIk&J_nI|mhz`rrfM0`lV0Wpam<0wNJHvyW9dVHrx|6FIOFxoa$+$Wm)zLL%sk
zG;w`1u+!rm!g_GrPW(M8VLm;eda@6jA~alX329UK9zo2}O43`5CH0^ko0sRwNaPG9
zs7|uSP)~l5l=5Gg`G9?*YpO|RK;|wFE>$*EOG$PK+acXts7FI5EJ-;Z>NQeR>U>6^
zzA}@5Y|e{!&YmI;pbYyT_O7j|jU-F|m5hGr-H2wMc3&r=XFNOMm~pS8J(tIh0#!p<
zqNGukaGNJxxcTnxMz+BhOe3(kNkZHN=JFu(3T9$xa6gHq4emDn51Z$l%u)%YDwSY*
z=V8MRQTNQM%E~<Fd|!UY6T6l-j3!8Deu7y`o0&+A2@@)=KwvNINW(&|YZhP`wl}wz
zev>FuUire_JS^cVaLe{4-o)NGBB@rYz%}GPLTJ7TW|N~jR5@c{8g~r=3{jq4p2S#u
z+?uIOTSxiPt`yn5={Ma#eS@m=yZB-JO)n!VT$XRdTlntHnk&C`h(s&4quV!8v?9kK
zG{P<@5F%x-SJ3BPaK^bTkOZFqKryx_An%*VrIc%?^49HH2n+H7IM>_;i2Pz@8W`{j
zZw&ht)~PU44X9rRb&XrIF5FS&qhIq@$r~lkUR7W&Gs^INr4?ss^(!U(l_QW2$jhHZ
zfjHGXv@xKCf>%|yEQ=UV_i>ZaEEc#Q8ZcB;U<ii%O2B6_T{Zb4$gk**2sR)!f7VUB
z&WhO?(OYRI`i1{0K561#U*|P7?2MWtRdsYLy1BlU*y`HMY{fR0fSt-V=YWyA)wOkJ
z>&8}s=&CE5)BLC#j32lNyw<Irt-G76{M2qY2I`d{W|V<=;peK4@fCKdIO_w2Mu!uD
z5~mr4+J=1*gf7)qn5#b@ZjI0?Gb62z+y!8h{wd<uSSSKl(Sa7=4_<FG)$4vk3pf*T
z02jI(hGJBmwX`YWR=k72$K2&CrVUnn4lsy`Q{R;c3m3<YI>pNX>dBTS__qP`*C!aw
zVTYOO<s>*s`y}`>hU7{ko!EW0JFxqBx1a5Pz59?aCU?JKyH9r0yW_i#__mc9lf=lf
z7?bZNr6x*2K)koSJ(mmx<wyA@H-c$HUny`+#TOPdSX*&8P@L^lOlB5`+k>qN|6@3Q
zp};%ciSCT<tn3W$Oz#wSR{7ugok?Jw&hbCP-^U~gHMg_CSIgYg!uRo=aegfPU2r3$
zit!C2;-VJ`>bw^Tt0v`(;r5VoYX@HHg@``_OjJlw6{r|M-FT*d4u#sn{5Xww9fn~>
zia8yzP*=*Stha5f_F>`fGH=m?c<LaLg2XW}Lm!pi5_fQ+?;w%IsruQ2`20cQDRNW$
z+{vP2h_dD6tojj3Bx8YaNxt#~_vGOaF{^$a)3fm9QA+y451PWII4tBH=Qm(7mgij`
z2KtX5=6=rS*!!;3hnt1t$G1Pkm)P-(=<(zGZd}ugYG;R{hcwmyY9-Ray9kJAnXe}O
z0nPc93(Dy~<%_czq<w(+A(+>M=wO9-TpHoL84J#nke@X*KPklLY$;nO<TQJlxgRUd
zIdY7wG_B$Wo6hYHaa{2S@q_uCZ}KY|d^+4jYI1JAKs~)eBd-{CL-6h3xNE`);m$&>
zc0XyvB!>jRF2wHEOQ`{mHwHx2i+ZyffL!6cHY`Y{f*R3XdIG6@2hioc(iE@TR5$6~
zr4hRvDoxMWX6+{t)*Qg)hcq#N33kIoewe&pk8&LVmA$1jFDl)QC?`nQr9sdRR)!>s
znWPjXO9*#nOXIgnX~+2ufMfo5v#}A6@Dbn8)E3MI;`U-grnKq!fDD#~YA_r<inH86
zQyeGC2#`~r&c}8*U-ZOo>{H8-MD{5+4|u@==>0cFRAAC-x*YOc`KTiRd%k|T{WVS`
zz6G=5=V%J@cH;Fqd%aAUTf81t>Ue2k8EB_KM(rk&Dl08~S6aNsw=&$&Ra#7x7W+#J
zgQW%MEBbK==l5A2zQN{24O=pLozoE;f${~o3BY^D7M+ic!49=k>2RiY0$;T`642V&
zW&RDcYfLXrvd_a;Rg+zCZxzU2MTd&JQ+?!2AsHWa1o*S;LdgNpFIZzRpHMhgK%vZS
z57x_XtzzE*>>g?7tzb>*aI<tQ&g)sP(}A7af{#=UB39p4Ou6=Lgl!L@tN?)1O$cvK
z*i)#Cpp*e0V>3OdHIM_KJL^nX6{3KUCuAM@t^C4(Qwu%7G|B}I(Tb%gATohuY!G&;
z9%k(G*SWtxMuERYWEK>F9s39;h|VKJmVZ0-v>G6$7JJnWV5J5$_UWr3pro7Xt06~+
z0}=8;py6&OP%knsYH{*A<|_=Qu2N&$eozO4^zc^t?Y+gL)vu3UJ~`~q9^Q6*8d4qb
zo6&azLq`kwcZ*p#>E2vTR%kgpw9ummYp_F8*kAwp>1Us?%a<=RzVG6d3!ix;h?dVM
zQYc=7VO*t`f-G^A^t)A61+e#gHTy6|VXsj)Mj5PnH4$HvX4Srs0f|iYs+NNkNa9Re
z6Lf#6G`zNm1Mf@QgDt2p9yGaO6xhhx-kh#B2fe;Kh%fw=_Zc|j6?5nQMGZG=m{6A$
zSDLwBV{01RNby=g_58WK^<fl7=f3`QMeFl={mLn!WykD9qmFpJF8J}bCIn9WsK^6L
zO<O3><w_Hin9COQ>7JlS`LN`9R!bRsMuj?Cl_&!tRYnTUja888MD|x<wvtTMkX^p6
zMhe&!@9K0Fa~nmD)s|o&ptcJ3sEP9`)w8^6@JZqHgJ=xSKZvb?-dik7&){hO_EG=L
zK`e84cl2ms@E|t%<LdfRfBJkfpxT~0BsU#}OriAXD}C1j`lJ(50**MO+?#zN$)kB<
zolap&Qe2l}a_5+Q)T-_K_#><)$iIE)nFskd@`BHvQgj%U+rUH2OQp7Wjg5EheA7=X
z|FZl`q}IGVu}4mAB&SsPw)~*`cp;*_4&%P^b(vH(e^8q5B|dBtXGvPJ>i?vaqu%JT
z_U|_-KgD_5hn%qyVJ9gK=o3_+9F}Q7X(HtMx#R$~RY&-W8T1t~hbGksbly<Rja1-Z
zmHU>x<V?m<815E1m%m&FPFuA>@C}n(Qf(pAWek&F=IjdT6PAnm1Uys7f|)MozdQ%!
zH(!u##9wu{kgywOxeSFn)lOS~7l<@_?eS`5vV`HY#Z<y)9_6YltdpMlGIvq$3cafW
zP<DHTDZ5xM@)NHGnw4)4oDa&Rm`BC5n0Spqt(`+}BujJl_@-Xznpia4l$1t0=e&!y
z3W_v`W02#JswSKjBHHFC_o1bhRzQ8T7q}0n`+?ROsiRlPd$@MKNe}Z<l^lJNmt~={
z#s7&8Z1?lSR&g(FgBJ~yUdoBM1_JoocWsS{=-6K0ukd4jiVnV8?`5{p`mpc8yJ$B(
z!*}b?V30n09>bra_kN1qJyFczBViHz6dgHQ9JGr0*T_*A2zMHto~e_U09&}E=&;}O
z$s+JCPH$3thIevnsNRf!wsRsz^A`Y$7}qqjrLBo?u2Uh*c8zNtz6Pi%{596eO}6oU
z44?iV{swcUX}Z=D^iF9BnVva*;ZPHQ7r`pbByY5iFeanBZl!2D$?p*R*>A=Ex@Z_<
zz2xzWG+pT9GpP@s$0P1p^Cs&eHjo5Xue`<(&7!QlrXYXRb=ivfoacCDaNGU(sMsGb
ze0tdp(*Fxcz-ldRx@I)FKVk5p=_oNVk6w=EEbbKf2w$Tgk8EmIJJoQ|F?<WI^l$6C
z2fLbk4t-k^Z6CtbD3j}XiG!zUw>vnUM$?w;-!Un<EG9ZyTFZGfdByZFH{jOulLKeg
z%Libk#2PtBOv$WR6T%IJ)p3AP0YIIt3nCqs8sa_R2!b0qM6(Z>T<1<?A3B;teANy7
z1X1wkH$HUb-j7YO53#xTPgjng&K*A}e7NZd2Uu=i7LY^BkAmfOCfD=0yF6R4E}W2F
zuq`A(>iMJ56EFCl`wO=4uyt}vq&_t%Ve>6fCO73JXf{Gec)-4Ln7)Z}H}T~t2~Ov6
zH{TKGJKUhIKhTHe=YwD%=&WKVtbSX&h1O^mXI<JY8uNqP@GD56+hx@6$e{)^s7N>h
z<p3-EfvV?sZS#a9bkkHg0tlEC*chtFl#XBmZWnXq>H!jF`Vb`OL9_V|+RuD7%H(<;
zkBAedP)D%-C8+P>Gue*umBi}>)Rd@vK-&+TACRzZj~-{<t|s4RzB}w&JeqWr#+1hD
z<IGRDyYP&KlV{j%QhMVQpV#=SZxv}BGzg$6fL((f$J)`5^RedZe4{mzGc-*b$lg<(
zAIS&8F`2++&Eir@Ne9U8vnJ#8Dds%<7hsdTI<5Cvxn#Pr0x>)#UtL#1QU>7?OR_d1
zKyT?SRJSo2kesMVVXS;2W7ZQHtNBD~ni$qe_-Nz>tqi22bl>;zp`?VoC#C3tg&I1b
zc4p<0tfIqXsx)pI)5ZAzhgBb<L%qL-c#in8f$tDxMseCKPBSAERyu+mie6l8X=92n
ztQw$`8te#0m=TC<tThL{rY@4531B)GD>@UVYJ`uJjhrjgcYs{}H6>uGrfM+BF7*iZ
zh+#`as;rv@tOrvh0cXO;ZxMVZNM-L+kSrR}MF*w@D=jsTSdhGJOlHJOFOJI__bIvo
z#Rw87Yb}n4fzYei3RhuoNDZ4oUVekwjjG$j1>7n?p)uS!CIKV?3dKtkgSaV=*Wup<
zKu%daF<*l~D)6N^-+d3)QzAtYFiVppHBy-+sg`NpNVX&wW4$2sAEf181EI3ACrzQf
z_kx1kC+TC%l7t}Lh@>qTK2i<Ks^Thm6htXE<??3>JaTA6!1^pVVnG2tk46mMhw#EM
zVeiMHV92Ya!q-cm5HGP+HB8OqCV*lU+=&H4&L+z)@S2M0lp}-u&Xt42x0rZo@*v)K
z5F0s&jU8o%kM8#z#F7W`+la88B8J8$kLG9I-tQ9-oJ`JC+cWYe<KpBe6-u+cNFeP&
z0_i#_zOZTun$}O@a4FoIl1B<;&Z4DG;l*Acm-VKQl7T}XQhF#Vno5|EwJcbdsp7#w
zE{q0|z6$5=xa-OLrLN<fspE(D+40jyAChzI=ea1~_5S{&_c!}()YsS)9D|VH_=n`9
zp9?t&2cGf}8`Y_)={Xi`?9+Kn0aYM$E{R;$BnsN(1V)F6qH4l+RSB-QYZF}a2oB0E
z5)$mKa&#ToU{wz|hD9}+bIQdzF38T-($rZe@u$jO6+6OtsiiY76ElzE?g{KSQ?=Nv
z8R&rNOuP*Cv_c^_XPxSp(yFQxZzaw0tmfn5+!rL*bsCT3Ph)Bfb5%(mti&P-n;gWg
zqy>~ZL7NqvSthTQIt_KkWF1m&fM@lfLj%%6PEm*g;+iBeRH%Y}67D&L_~r{dDpixb
zo5phTgptNhVx(5QBaR^tk2fL%AY}B?h$d2FbrSjaRX6JdIJMi?spx!#(9;O=gI^6;
zaTD+0X;piaGE}z&+i?Pi5603k)DEptH4voHwD`gr#wq&Z6<M_fV<114!08G^RU`y&
zy9fD;+c81<-M+njS0J#!RHd9563r`Ma-nhv11@?h#^X^tiChzIX&{-Do{tKIQl>H0
zEVD|P!Jk%}0*5LFYv(pRH|ND`F|QU=y5U7svZ}_CEs`{diLmEw(WAV@QO)6c1{Z9B
zrV4o`zA(h>1ypcb?K!k5GgtXo)P?P{W$L;kBU&f1=DMO?HbGf$Do{>AumXb8^VPv<
z%EFbCJC<bDtwGeQ9Y#EQ5P#EnOuB-gdxD>fp|)G{qDf0+ideo;E7jK^ULcMYs=&Le
z#OoSEy=_6f?b!D1a!Q#hP&lUjvVCHh!uu2-_Y<~#4;bO_!7an)?ZkG!JwTYN5+L-B
zg#%Q*eD^Rp$1UNveK}&KQ4sO)$(^@1hslsW8tfw8F96muxY#%}F3Rg3{jm^z8@+YZ
zKSNPSDDu|8;f-jyR3r||QWu;`{e`o8`C0;X=AkNU_;BC7J*d|Wlb0oELW+fjdO9)a
z^IZ)#H?{lQg1l4Iyp|hQl|Xx)T1sVw7{jRZ6q~R_xcNVuLVoVBdR8UCQz7&WN=I6~
z5;!)23ZD|X>hqk%OgCCQ-=%ICO(CVd35xW5axkRoaH41ko@oHQ;4k1=d6sNY+MAW8
zYnl$k>oc9)Xw9KPHTjw&jh$FSA)_%uf>f=jMM$_J_Me52Fla+MrfK<XY=nX>_B~C@
zXXYVzye5R1>DhQ@Ah<tLRa{*21;ZluS0f~ZPMBL@ay#F79vQ@Ou}%^9d#NY#Dx#(H
zZgcDFi)mUexN~|KoHGuOjNf{r+ynO);~cWP&wtOxH<%F9wIN!Sl$p#&__8O~0R*P<
zU1uO0RECV?@|np7r{gJ(uherq!trVzynE;C1hAo9;~%~Uc0uLW?15k~j0SWbej6tk
zKqnto99wpEhQSHHP-O9_i&D^2+nXwTOF5Sbi0F#cegvK<1Z4fc9dMi~UO^%Fab+1!
zI&p^GQop3Mw1Kd{qWc)w-%JBJM$II8fEqSD(*rUhUja^YW1#c{181xjE}xV`Ym|_D
z8)E=7QdSvy8vyA~R_fbYkZw{AqEQ5-doARc=JC;(o3bBQedKKn9{<;_X4zu6P6(H5
zDxu)3JH=^Jd39$a3vk!Qn%1G1YG5Pl5re*@7}y;3CKllh@;9m;F}V%UbdgFbrc8sH
zrALhUQgQfi5lvT^V7*xV7~O(T@xe;Oh$!9xWMO#l%Y7^!NO7FCi_c0m84!dugFlXl
zms*pVk||9rATpb#8HH&AWamYa2moXk1Z)ciONyZ3Z%Y%irHKc4kbrkjnl+7pw;8Z(
zAfTV7iGMjO)mj!O9MO20k{B(Fj)NG{PoFe{Ji@BdDzd+_FSQnd(yQWOD+YsCsNjKK
zry`OaEZ^UA{?d?2hu6l7dj&<G9mE#_rHtJ~{B!moF@_-L0{q4@?<VfNORrR#UEaV<
zVOBAOBn1*_%@ZWcYYcbf@P8WZv@{(=sAW!Imz8j1O#`k`#PR1s?&sOA<AEqUj!zvA
zjvqg}&yH_BeLveJ%skIC2{Wa+MT7xuQv_+|VA<@W*?+@jpbY@I8Eg5piFjrxuHy5U
zw-T&OAo7)z&0l;T-<DFdrPPC}CS^mQR~C^ez?txuf13azO_iqN%!XklE0L|~p_BIK
zIn1?#j27tyZjcfj)D5{S#(A#=8ZYtE_+xIJ643M*E!mL@yVWyFM275jW(GG_45dA|
z@wi1}{$P8Ef5GA6>@}wG;b`T$=?`AhH9z0r9T2)weZ)}3kWn1wV+QmTVvO-RnXYUb
z0y<N8JtuRQZ+G#vWef3v$n)!!*XscI2;~ZHKQq_^=+DdT+ad)+PO6x^Uh)X-@Zs05
z@v%1uoHD(mn9_ZGK%`DlrJ<J_;hs^OB;Y6noM5(#jD$vJBzz)(nk*iYqjk%Yx8RWh
z8=f*^KlQYcKVU~vJSWtk)ESEec#z^q4mXJmeS~ME<)&LOYY(UV(Q!=9;v+91B>|NQ
zIinB37zvOxf^FCu4Pb!}?2U%6EtfG2(P%!FZ3Y|;khwvqVhv6g`XkV8ddVH}h*umw
zS>_(b+iz|k^`{T}7T;xi-u6vezDgsA#Nqf5U6HVlchT;5%acF8T(#Vp|69P}M4-Et
zY;N7BV({7f6f(h6Y)Ed{id+jXuc09F86&v5Q7d-)*%kH~c(rweZom8p4sX&GULn<|
zsk+PY?RY%fzm?wao8Rwy%J%zm`+d{<H~6}vKr3nyzu21vE#eGc$kGD;vp3H6Mra2}
z5%;|LdT)_0#o52{lK$N-@C>z)Zd;~-P&2%ZH$A*OUi|Fd%$vTw8OE>ac#ewdS2+v0
zT!lWv^{iH^EblLFLgxd&BtguFrxm0dxCX`L&4R<)OZVGYB>WsMB%XaJh<s^2zy+={
zUFqbWtE)z^FbKc^(1Yz15OaQbEuu8TwMDmF|LE7hzou&Xb)S#@PQRu^+^!AM!SoGu
z=&E}G;Ag|(@P)Y_jcUOCn@gKBo6DQ)n=6}3LKQg9`06Tdn%e5xy0N+HR!>#`;`hJ#
z$jjGlM-jxsZU!^Vtss+1(Y3nnFhWObovL0p;J_dG597QSnE4^QmxsShe1k4k1My*e
zS6aIkQUV<yKwK}fK@Y&KgjR7oq#jh7$aq3DKrMKDH@!Q-_@8IHNxp!+*st7h5G)d~
z5bpA1yucoy1KhxO*d}gMFOGNkHKQ{aXhkJpH|1YA=N~gW3#8%8|BZeh-I-)NtGKWR
zzg|EZ{z5d}y+xvuZ~g6F3k0#4W^DMT2A5|FKf6^|S{26zqlu12SH9Gh;@Z{XtnPe#
zIr`yKHM}3Uo4HE;4MPI=%0c3J-RO}xaX&G<pSZuD$m}Qj_Y;HriHCfZw(|Ao`-!An
zc;p5~VY9=Wo1m-IIQj(FdEb>fe%4h7_5=#Qa6-qcSlAa4eY)l-C5pPf4Syr;emV4(
z?S40K3=0!v=PX7);tk_s4i&El1_N2%%W6QmuITUuC~gf5#CV~TG_SgubGqtKxoMYb
z=>a)z)-z1#!MZDD;Mc1(Ys^sCA61Au2V8Huj1~S0LpKo3p``*WA@dlbI2jDt0G2pa
znsRK{A@R9Mrf$3ugpl{d*W9WeGAoDa&w}mlUE+!0O5Nv$gSHNf>5+C<+v^kx>2kcG
z**a^~!W}FvMoWwLSZN`PLb=PRjl1l45%I~ktGZrXt;6mEE!>OzXv5E&jS0f8y5@JS
z>fiXoXe&;Jo3HaH_q+D6VzjxQ6F%LU)M>((m2JzmEQ2Jk&MLoB#@5qG9oEOGyFWZr
zHg(|i_FxbwS1UAB5B`hSf_P0YA-oi*@d*8*_Uu!PzZBSZH-`3BFgiE;dhPGCe51>o
zGTV5UP)L>vGo`|d(o&*Sm@F;bC=~`vg^^NWuJpg$T}xLRNtXUAacWc5=itB4=TvpO
zdpuq3?kQKFStpQ2i5{s^X5xYs5)X-&aoNV$m^M}+K)i)`Nj8XuVCy!$0ql(^7dBJi
z|1kI7$VjCGND8^n)SO-*@(~#knHd>zBkp&<FB2|g(y(1MAN!5(rVP!`Zdn1V$ucVX
z{4aQ|tCV1cE>1$L&VJi8=Sw#8a{=bpj$eiY)rztgo$!86?hSpINy&J{#Z}3b%RS@e
zS3%G5H@{Eysy-zA{cj|vguYz8R$fQ<Qq;E|4Pysr8HCZA2UC1L{f@!fb&Hug3t1=w
z!HqO1&i*34`0B>Z-~Wc`0cLvOAc-=`_}*`S_S^r^)KIggLg8MoYC<of`dBNJsRdwL
zQA`H2htKH&m#mjKMF$yP9fI41)9_MM7`EUlpRNaGYU2Owx467AFm@d4VaJ10$9+A=
zu_1Qc7e5|YKkgsF#h0tsVC>pZy1wVfu{5UXgY!=y*}(C@BK(UT$DZQh{sqVp@|mYJ
z+fd5!z}@4%Rooc*QKHJYwljA)chrBl$d0;?y4X?lFhw#=u-9|AaMS}$I(s<FV564{
zP%R#&v9-?~t{uKO>PHc$T33bm(>~0?(LVd+VFte)X{g0;VLHko6C4i67emH*fO{{U
z7;qo~d~In*5ES$H^d$_ZFIj>TT+5Rjj&aFylzjtqE{!D+RfD6907XE$zeNdWRs?A?
zlp+O%AjK?H$AXzPbR_r^lF+Bjy;){QIow-AH7$5<hYRqRMp3P0d?#~o@a^E6S=U6|
z>5JYDiibW!UDM&}+d<5eUJm550@uEoJ?ce`E{T^wvRprs^mT?|=m5swa6ZHo1qeih
zb7m8|QFPG^6JH{y-6L`H3`HUk133m8kf7IvHMY%?H8#5e$<#AY+E_T8Bqy58I8hil
zEZRk;1dcXY7N{MY?D|3ra?vSFAQIKRHh0Cs9CNp!oS@-=O9q&z+k$k<5g?j>`x}LC
z|1WI{Yg7EFVjonn!&fn`TMt@$aTECwydgLcXeHaYWNAgQ*H+|P5w3iG`JJU2CY6fT
zcTj<S(_of?q33=WbJXwECd+g&B)FGJu`c%WPy-AGCqu(gPLZhut_XUqAkL|vtH>|z
z4te*MIqi8!Z__(i(a5ywxJRT;RMgKyaEp8ELAa}?_CP?fSi4_=k+TT{5Y2oXhE&$%
z(I^tIg|&#)T2-vpio1m8&@Mle(|RsU8|;A`A9fIjVkA><)>@c1p9zuvSlOblC^eX$
zvI|vZBJ2o%rI~00AP(@a*rx2Vz4;BOPJrD4^FT#-7AsvqcJ9}@4KEu^Oe&o<sxXhy
z8r6UdS_{{kfA~iat}!zdoudWjS8K0U*graH5qi;HZGN>52ePmehC`jN)?Z~wg^r;N
z9Uey!IykrbYWWp;M7Vxk*)sOUMQr<4==2W$8=}okNU|bxAAGU<*D(fh-Tb;?iVqYT
z57A8!pSs$5<rgZ})<5+lC6zu^<=v7N%5Qo6DAA|+6<@Q`$V!*yp~cz7wd%UpfT}gi
zl!YttXfA*WP-ZaDhTC8`>cJ&@NO;uqcJQapL9iR5hs%ugILUIjaz+2A&SA%FgCbD6
z^J)u12J=W-g!zO&pWqM6)R<rKH0wT6H&j@es{kg=&ZpK&YKH;VnoopUQH8_@xfn)1
zA<KVbw@bCyp`#cvYtxIpdh_S%_xaf-gw%*;Q*=m_G#LX9`*@4ZC;jq_IK2!}i*{5-
z6a^9x^BCrYHRFZ(NMUY@5zukTK|^LJmLnsgIF=YkN3x4v5QkAmjLOKLlNRa=>$H*N
zK_DDoDGtqz@}s@HcChsn{>dOH(Xldf@Wa8#!8AHoMq%^G!8kcs4jvthu!Cm@_YM-m
z_cDGkhCdHs@5#XwgHv-1tK7gY_0F(^9}mXa!F|k>!Bx*-KY8%<K*pHm7|$rDhu}my
zBZa9jC(<FUvMD%}wOM-LU#}zCwj&55Cin_0Lf%-##ct;XU>RTvAy72!#LIC02rMI8
zn2JGFWNst$s~AV<j=J6fJ$aKsx~BByy8Tk(j=h@VQ@lvx6*uy64;U;)M}Es|<Y!wr
zZ%|7}@#|jHvs6N+;`1vVl4#i**zwcAt@BU1smTL9i*7Jei~=|ee6PZ%UK|B3`m-+i
zq~(SIa(ZdpSfrQ0v;g#Ps=rb9sUEKqz@P)9A~G#6u9LRnlTiX<45abURgWNYE%3oe
zrV=m|=AWsCp^-JEc=D@3xS<vpctR|n%g=_G*Qd7MHkp!7F|XPl5~H!>KNA(m$6*wk
z^=SdPi+m3MNurGZ3yo*pc{iH#ZZst5H#m8x#$aG<u{1F8EazFcU310vQqR~-=;ZK&
zcj2XXT|XfIQe=g2$(eVN6snz|A8+(s<bH`3N;rvV=1BHkS1-Ez!ZYkj|DUKGWbq-n
zbuutQ`YA)Cy&|uoQb@-*B$>;4=E$oTMQWwhAugW6#Z<BmQPUEoU1BcS@-(q05r-0m
zs8#nC64R3lG5y8m_8IhF=`lg_RHD|>!!IO4sW3^tU@9IrH$o&(@lxWbX((P2SAG8X
zHTH2Z`7x3>pXp@3Y-Y|Tv!v2ODC>_SkIzRJ*!jTtd4J+#&p12p3ZIYOJMSK67kr)e
z?VL?*vGcLD^T%Plcp$;fV~^SSv)Fn6`q@D2Y(U1cKo{xW^WAxacfx1`dvz0gmfbA{
z42%D?z;3WQ40AW!ygLs)gNmWzxK*6LX*A@?;IO%PYOMMImVlRn8l71hp2Gi<VJyls
z8gqDz#9)R)7;+#zElhRF+zqb5Kf{Hi{l~5)CHDG0t}5kdMQM|i7&f>H$66Vv6MiGF
z*VzSMk6me6OM(+T?K<YPFEi3|EdEGgEh1EJI0k>h9g100<B{=4JI}Q!r{gB*O4vfP
zTn>n9*W&iFL(_F7Ar~@*Epz;T(owJ#>O5e4uEn_AP|lT4w6HeLslTAMStpCjOInw~
z^76oetYi3;H(A&pV`L14sr?R#7Mr=GUE0)%BgIDP@FY&!g1Q);u+9T_#)QW1I(!yP
zL@q-~GI2rZ*#DE|4_dHGLwsxHAs)teLkZ9zAGGlR0UHnSX`?(L2FjOA0?eo2(AmPg
zt?nfg?!o`W)e^4xyUeZEITx=P?+M0B`W$-pQF@cut3WE~62hXRh+XxnP}=G04rsBY
z19G~ZJ|2M5Lv(G_Y_QZFdd1kqCF$a)Olrvyp%Q<_iJ;mc+)FTNRD;Y5ua;R%bylOw
zmxS~n^Pn#ZpRnp@%^^k6q!xq-ckzGwYB_$x<I{s4Xp1VMI1DxDu}qKZy94bPkF|gS
zXZg`DsL-|+()Qwb24Nq<LPK9+5&gkT!=nb(W=N<9TBIX<hDnNfhaz}vX{{Uult&8-
z<2;UMsjwKKR1-X5FGc%I3PEQ`i$rsYi=jd`Yn?3mgnhxO?1;mT6V}Cl!<>S~DCUTt
zy^Qg$hsp_)L|DRoYbm7U!-UBqX&!7%ac(@>aHMD<nWcy$9xp!&`|!LlDk)6z(>Wrz
zixg%tE~%G7lO}n@Qik*7QHcE#vjdq*AvM?Bm>Tgly7C(nG<bbH)X7m0vrEREEY?gm
z@abEnIKR~}7XJ>j{QBOOPrIW*j~lX@JUU8fYGkzDQA3J@<&mG>BEsjom{*EvrA{Py
zd|C0D&x2x`p%6|f{pAabs(?tdFLcJAQ9!R_@?v9#D%OOB^k(IXO1Q?Ey2?lF(U(o7
z9uS<ssjO|c%c+0?+aVYd_a;%<(k`ZCGUvdS<lh8=nI?oWh!JCHwm(F&!r7t^atG|b
zQpY12j;K4l#rC^OWWO><AxKeV+ZYi5dl*;7kJeQ~t}~t~Y(9ZsR@jINIq{eDo8gM%
zV}*@*5+R=zh$>9GNJ+N&*8Rf92IaDHMJ~>)tP)5Ie<7{-G&k1hJQSP~g}f*!7*nH_
zi3Q<Fp5++rz@Z0N_QniD^>i#RERw&tL_D2t74}hY9e!8f1j8M;FU<J9%v1H^N^#tT
zW?Sa|(%V>DKJKTlKNlcwc*`)gy(xA9f95oP#TBwIvx;Hxjz?61R&}s^#m{uV-U4}I
z&MiuymDblZ!n3I@N)gdkV(hiqABPM}Cqmx-)b3hULdaXI1tV=Mx=Zr&?0b(E(gR!(
z-v0Okt+?#~<x~eKS4Z;>+0(Mr6)yi269Regr9I`6@#wyXPm#pt9S!*lGqcOw{1>Nb
z`(0AC6^K1t6fId0#TRDnMin@&b+m=RdTDbOMR{{kI84vgYU+4h0{r)DmEXqAN4r=u
zADJ&dWjfB(4JfB*z6Sm!zEmFf8Z9g(Ic+E-0?~dV{0tX~M~R_^A6gSTu25M*EbiD$
zrnSNw1ySj8%<1#6BFhftC=acjwc}@PN>hgum1s*zSyM7<_9A3*^m51~m%el=ms%1!
zJ$8uf5XIC+1mTX&C<-b1@N_ZW)*&gK5bBvl!qJ#pV{_dVh--^z@@g$nF<DMX>Lgzh
z5xW`tB0NY$rKug6XM*6M$FH{;^#jmP5aJU_Lg7Q-rY<4{hjQ@A|8tW@>J*OA`1P;B
zp;hQ7)*6_d@@o93{Ty-eXTP&_IVVL~<8aPizkq&f8?%<V1B9Pu!-dSVLN<zE(`+X}
zPGj^tRmfxtS=f#jGEWMbbwZ|&7BZ_YplUa!$vhyq>Ix;R4rVQ6`U{y&%6ZW}R#3_(
zlpsahV}&f_{}3{fxFzVkzp`v^1oQ=smffXj0j<L}Q8fg~8a7DQS^J++e7h@q6Xxzp
z2xfRb=$Et(jrsFies6v&z`VOFKB>rfF3gI92ECaV_=+)P`ymExi*Y5x6<3Q!++A+0
zrOPciyqZRyZ6cOR?Z=#$ZaNn(h4RXedI<1+JNPEYifG8UVf3lz(Cr;7h2yce_t6<Y
z!vwIA9p!jb>SZ8@H@!#Q=$6NeX5r_k`|bVOD~2HAo1XltpS9#yam;_-&S2)lf}Z?r
zkTo8^hzDRF{t0rta;&kah8e=+p5vEMX2S0eQZsZLlzh58b-KO$^UIBs!JZE<yH9$;
zr%Rdlk>tsE^26%O4=*Qv&h($|+&hhP)4B}5%qvn)y!`)kJ~`<eIGGxSqCeykAJ)51
zmmXlb!%&$iyY%%HDE(w&(_Oj?2hHj!9wsASBQp9s)eoPNPxD%BmLF!HUaL_-I<ZDI
z(4iS%2y)=GUc^%0ArQ6W6}5aSeu$di`u{*A%$ZR=AzhcIzNJf>+XKywbw%}CfkxfB
zjMG&nvK}V2O-6Z8{dIouYOp&Z$g3wpaCiTYh`;V%JdUkbjhLT0>PPs^QP*L`&`iWL
zS1lal)~hE9)Rc&@qSe#YQY{2VR!>w@PVF@$YOdue*zmLJ>1wGKMo3ppB!E~ybq>P#
zaQDq@<rj{~r|NlzYDm=Lt0hq_x8SLzPM%uo<f*B+THdx+OXEVMMfF4&R9rokSwo`M
zS}NtODN!w7tL^PRo0&TAPgD!ct(qJ9RjVf==Vpw!#vET@rY61G7`c6V1Rox{l&i&+
zR;>=XB&w$3euC&VT2Rjkve}B7y?li{hMZkCn{4IewR~%0m9|>CP%R&cQOh@J)KV>6
zII5ngmKJ2z(y}>|RE*}6Il&>DsiHoMQ?BYc*U|%7wX||eVuWt1<we+ba4qT3ZmpgU
zn#hf7>V;%(6Y*P*%KmusZ;Ucy>i4D^Aj_|T!356K{aS}YdnTM>YBM6;Bm=FAx!X}C
z7$21p1h$i9I-m_q#EmRaP}PnIpPISq3MS%GCh^({7r8qWGQnd`IPv>LJ?ys^sz047
z#;ROe{RI--<utULA_T^+Y3Z#^yKe2O+$!HteN?&sR`-Oq;Wv86UR>LIf;d!Mut#s5
zVJdB6A6u{91{=Hu7hAk!gvakyy7rp=wS?}dP(r9t3Bebl1cHiT;5Pdfza!aig^Wp`
zsA03pD~%hxv}XRH7C%wej!rWJr#}2EmBg-p{ds;C3bV23SXxO}V3_W*RMRdDOEV-(
zL;KOy<`5X}V9pbT`7W-$I^t*ua8_Q_OC_{A2cFM|8|c#cn8;v;GFT>5c|?%(OmZF9
zsK|!!=B0V0iQYHI({d%)Ioh9w3nW3`uG>Ozkb{CtaB)B;=zT{y$fbcJ<2X8Lns6%9
zHt<C#tCpl^0`l_Q!8GxGxJss~Xb69;mN%|qhp*u)E;c7ap(FKtCTIeG{kbSegMtvX
z3me5JRcy5#yc|?FGoT{PR1rx$?)!oG$<}(2DmXHaSlh@Bu?Q#LMP8f?FQ0a8zYEW~
zs(@JB$RdGqWA7qKThk%3BL}c`y?n&HawcF7N#)eR>$EWqZ#^AFv@4!`YGY+dy)+>-
zAVx@@G{KK9*sxo?cv*mB7m%wMVXxE0Oy>&e=N#V4@w^c_l%q5<%>(BMcr(sCfn8%C
zpNyP8oBAcydDcILnx(7HT@=J;tDV23x0yIQ&OY`GAyl?M!_G(VojrW`u{$LTh8%T3
zfb7!A10!@~ultZ&i4;!XCC*|uRdK2=I%4-nJzSiMpqd4jHs*NOg+d}-@<Au)gC2ua
z4WyrpV#*@~VoQD6=c|`is)+qb6KrXS5JD)S3FRU_XpD|MwIw+b)`Uo=1_wg)oFqFZ
z9Ng28I5AxhXED~SY#Idn_xKjVfADAow~^L(p^(I<hCrbVDP>O6;pM{YKp{D5pUD-H
z&zVTT_op#GS)30T21bV(>QH=v_6D{WQOr9!^Xtv+w>7VR2aiJM(~e!0sd^35s;kSh
zr)+<@ICksB{2J`OHow@W`Wjq)U_K59WHBt`<h@vZo!jr`9uT5A&T$XM{^0BF{ejn6
zMsj@N{TK$y?+=svn+~Ig>jim1a<9`OC3?=#p%Ot1w5T)BfeZiH@Rk&0Ud)R-*t?Jn
z({`r}@AuGM|M9P1%X^xcWj40Aw15?*2#YG91zHS#0H10I9&(Es0y8U6*UK`Ml1~X`
zC3l3jC6}0Vyt$6bWJKsr;`og|U+!O?6YR>>uIM9=GdY=;W3Yc$oSks|(BzlKi<enh
z22@~9c)FMBVPsw}^~iRSk9LmemdCiKxq<UEr-fD}Ot|~W-z!ee(LDUQi-y@Ibsix%
zFn1Bp*$mG0yccrmJ%F~=wI4-?9yZD@W+U`b*EMaoKx|)xvfAVgn*bl%C_{O<53hLE
z)#!Qt<&D0cdfSusVv_=M<9*-k$;!yP&M=xD(YceQv7fVjAI7@hKZ?E|U-+=~=rs33
zo#f7t<H^g7pSLoQJ7j&DjGPQUf~;M(4U}@SIQn5TdAhaytFt}(5yFn@TAvBWikwTQ
z$y8d5YZz54eJPc9O|hDw+o<o_mVg!X$$PQ*#R2`2Qe_-bx4{;^g5&uQXf>ng&@Fhh
z7#v@#Oy9gM(>I+i(H4S9c*NEd!a4aSOxx?x?XxLFN(vdKrGW@@(SZyI4ZiXEbocUO
zJ_76EJm1i*Mm<Dx`x?o66;su?K#u+JBnA0k+O33|JX#yg%`wKbNrmJn1X)qetBj2H
zwg!2C5x^)gfWah*+C?=tOkr-Grm8TVO_!+JhzRca0inz{F0aNnUr20`eH!IaG`3F7
zB3Z6+XG&YUDxZrV6^Z$HW5t{IRlHNK0yFY162~cA*F42g(p;@;K?GdNXzq<r_+Wy^
z5{j@viY?rrh{#7YTbqQbQ%xE!)!Z!~2M5ppzQxW`!we@yz0c347ck7QR$;z*ijhhe
z^;t3$C$>njT$7(5&^PaXxxExGv%`vkd${nBa!26E%k|BR5QTJ!h6+qGA{2?hy(*h9
z8<7|9o9CE+m_L*#qb$-Sfh+r^$|X^1B<c+MNZy2LEEwugn>_lR_B%*d!<v;qdy}UI
zhs~Fv?=0*Scwa1#^IwMiN(Xii)p~~uG@JX~V3=gNRc*_Ztb=7lkfdtv6*3kF%8-(m
z>xOU$<dSXA+KTQYpCJ7=Puk;YZY7pWe<X#J`4HDrFA?*QqExh{$6s&0Ua^HaR*R;d
z{ph$_$0LgEgj1RQo*KL_U_BuOX}JGn@$pF{DZ`KWe(vG>c<eM5J?T!v)z#$#e|O^#
zw;I^3KY!K0ZsL}9>vy+q;J@qNG|=#_-9prtYN8sn(|t;7xZ%)Wv8bXcKm`g`t_jXf
zCrp458AG8Sw?vgX3QMA_j?)i_3vA0}M~ui=8+D5K5o;FA=J%$+_=vOo3$#hJy|}si
zlvTJ9Fj=tvl75Fu8Cs&9+*HjD=*zs<O6?j+7evLE&|VT2RBHR+5ty9I-d>qmB<vs-
z%Fi0AVpV*WNQxm>8k$g*o0(%$@n%Nb#Ei2pOsSFDtSw1LLgiX<ON>{^UPv70v99Ij
z8FwPoEf`od*n;rq)16+Y8424=wetIreI01$VTLf7g&g7cCT&xm^#A%d^bh$mkaJ@Q
zIXBP>TqY}Uxq?wh%o*~W8I@myTP={kjP$smuvnnw_Zdl>QgV^LV)dv-XWh68fBtmv
zV|d{*>r<MeAVVwa_h@x}p4Qdx^2PTC=4{diSf~q-);^pR_UZp}cWupW9NGD=bmb*^
z2)10Q`~k=1byC?Ru9cmKyfuL#FvMU6o(pO4i6F&`meSIeMbV-u(X=QLq<FtV5(Pd$
ze3i(pawP2&6pE%QqW?qsobDM62y$k6hE>_*1=sE~-93FdmpR|pUah^+F0?_@oBv;Z
z*IQu=Jh*@xk@aCouYbr2bk$|C2rH;3PxBRqOKn_6la_S;2e-{rw0YYgHZY7l*=w$X
zJswR5mv;RJx64z~dAlI7ccd}Dh8e-gS6CSj)8AuK8WKxY(yGy;WR3GRdeRfPlc};h
znXm!^$3tm7J^iMEI6V0V8iNB7oSW;I#oCgs5V~DRI?jWJ?0gG@-+3@9_K<dP#7)Z@
zls7&k{ocWI^Nel6+X+cbLy!~I%(%LNF78m{B>@NF`9@6BXbGIC==7Gnlfu#o63gr#
zJ|rj4zCC$5b;cHo<G2nv=E?ER>*9*r>+g3~&X`btF;48X+G(YLlsck2+E+DeS<Ghm
zx+Gmuk9|p2^Y2qMt)(Pr4t>=x;3*8LWm7Y&*lA6TS@WFyjbMMfYUxq~)U3(I$0#tZ
zR+@j3RfGpy5<2(|-EPY?`7Qr3z>yD+oi4xLc{B2QCz8XzA}nEnFyG&J_&CswkDQ3*
zHzRL`ARq0gH$#D?AyKn}pjlT%Xe}_Ug`=rATNO2+5TQ6^l6;{S9eh+y(-hkb<{|`B
z9`KUjHteqi#tneCEN`lvHq3+25W|&p4$4x~Rsz|>5Uu&Um*p30FLnVS8Bv!95Pum^
znft>c62El-^e{O2gxPcdteRcauJ6`&JJbyM8`j;7nhE3<Q7|)&jbF-Eiw4byrJ~>!
zES0UKumH&ChNh+?1v$X7u5c?mFm3_Rus2_gznXpZ<EuNb?naDpJh<j!IR#5+jrj+~
z-e1et=dE5grIf(2fT!#}$x)N0*_IM~V<73|A05-_3@l{feQ<*5?$|B<gk95S*Hj|~
zRpyTdHkjEQ9yS?tpHACBTXq}GSB};gH4-wm%B<0_tP|Mt0-G+d33>RU;@Vj7P*!*_
zWnCcMam+cFpgu~hB;q6i95?7eU<yiyyMu&NvSB|XTo+))*z8~uVONTf+cM}g{Z@Ss
zhMgs?j}nQQzmAV|1vU`Ou}^xL8K&n0?=IjYv5ninjr(_D`?j#OV8nwvjmD98G#!ox
zX09-u{PX&fRS4nYViITJF)Q~elHk%VdI+@D=nfnM59uRHS+-=pC>1STwF4&|4+Z^^
z!(;fcEz0;fQrY+1(niG44m<<Y@7hP}5g5Gww2o9QR{^UbEOiIb5)hi8x`aRn?CX4^
zdOtKS)KotL?}spZj+Z9hJ^L;~IENKHzSlZy{1yl*YD{@Bnc=Qy*xsvWd6tX{oCMv}
zR292Kj7|bV9YlqPkPa6h^Mnys<Q6eIVhLZlGE^aM3}c5g2sNMQ;1n$j_&V!-zs-xa
zXJ2rlAt82#2XI^lBB;2CR7E2^4}`P4tO4)K+yEEf)X#W`rHnL69#u?J@08BIiPZ1!
zu!nPfUXMA~%sO`J!cISI>-0n3X~<d;BHZpmD=z5nc|<v}h<E<KcQh@I$r47+dW{Z~
z_Cn|kK(}DsIHa4H`zyVA{aHFkx@d1HU0-6KDCw=w6je(}=Om+MaJ3XNu_&aozH5>U
zwaD+i#oEm(d?6Mr@UocX-&IBHq?(O?T`8IXPp9)oh4OF-IW899ZRHFic(B>-35S*m
zDKeFfq)i36M8tlPp&2NkW>llVnp%pnSu~Ywixdq2pQmhpvTW6Mu%w&mV}T#k0$jcN
zd^a;AsH$PxV6Rf}p(OchbJcwbpHJGs*YD1GL}?sfS22TU!52mo)Db+j$4&47u8JDc
zRn$0(1rN4y!(hf0)8AN_IGfkQt4#ANx0n;`vox~<PwiRqVZc^+;Em~{I~;-dmiufJ
z#sXYK+C3MYb{aDRFn^#Wsb|&ADVTA~v(FO(6q_I}?H-52K5Mv0rvLa16I_A()Dlms
zvn7BtkZ39#Ru=fGQH~*-`{S7+Jb=h%A1xNyHDQkRvf{v$qKT{^Rd3FHyZ$4@81edw
zcX`QJ;>q~xQF-C0fAoE&@BPBPqyDRy3$$`HumHgP10zS}N3E$pH!*m~%%MIkvjIT<
z0=9qpsK1O+N59D=6HCN!;kugzw8|eiTf#3pZVYgP|LTh?x)#<#)6Qx|;BO}VoJHEl
ziwt2Lat1%&@S*1;pgxIW)X75WPb_|zUL>cttEaa&Pw(`R4~x~)d*2AWd@nHw0P=tR
zcItF=3bKi+tK@X*DrOD|6HzWIIfxMCTaC>*jz_+P$wE8A#1qizWfvyBiEO^=!#($+
zjpcKzZTa$#R`^eA7^S@LVzj$}>y6Dxfy^#|+{<q53Vz<e-!}+0n+x2*rc@i7_wef^
zUcwKj9wUJ1Zd3eq?(7D&6|_6?zj)TtoB-s;{pV~J6HiyXjaRwfg4_*TSs%}QB(AMq
z_6&9QDh=AqlbV+Bm4hw=r126yVz|R0k^PSt3Y2CJfxPF!Ay2m(xE^Hl*=(puo<-%#
zxPK1j@_81tY}6D&n-B)Qo$@|m926p;+xWWWagH~ibF5-Q)hePYFJKH+6xo8GK*}U>
zv#mtt6@l_1`)Wqig<$JM6HB>i%1eh@UE3b=oO(+8KlHF&T*DZvNT%u`W_@FS7vrs6
z&+qh>@=T+GXDn4_XiKScEk-}rorKlZOWiXJ0MgZN#kAP*b@mvuBx9;Rw}JPlVGWJ`
zHB5op#;lWSYhKB!$cE}eQq!mG?_o&9f_#;+gPp_CgD1=wmf@-O8~RLx-8Kn3T1CgH
zFgBU0W;4UmrW#>tU!D@HXx1O(i2<rJ(`GvqIklBL4Hh_VCdOcgc|aEfynw>FDKIb5
zYjEL%rRTaGd=3W>niJEbHt>%Pf36h2zzA_cHdj6Ata<!b*Ned+KY0_D=cUu5yxgYc
zCcvf}*yXQmFI`>2QahL=RS}q$=RV}Rj$al8_-1R_Nz9hw|4n<5<#QY>vhie}55$|W
z0Ac+E=HlUiZ<@jv2sQ7(1V(gBOtM0FYYz!=50Ees2lJVmb_M^+2bqtjD#zbkJ(;`l
zer1U%Wyd$KzrQ;9%hck#>eBn#cgKSdL{8M?RqmIG6IwpGRe4t%dVgp9<eMS5SY?^%
zexL!442$JY*yVWs6=X)?wT|zN{_h5rJ-obhSKiUplv7}PR!YPKqeLS;0U4!>b~`DH
z8FY#37YyA<B!q3rov{RDbtlznDoXq&coJfdL{mciZWu;ZQ&MTuupnENrqUG1XX#}L
zGd0zYH#}WN!O)lysK%<D&lMEbj8q4;lvn~ZwS%2nu+HM&VV{iH_t-BZ_FdNUniFra
zYDP}AQ`E93ki8@j(V^&Bs>QpRTtY_araH~Woe^9}*69L^KZ%Gu)peSQw?ept0tG%K
zjTREgVBN?i>4&Zqlid)*R7**Qq3Gycv?ECiY+6vYB=r@xE}U2b3Tz7Kv1;l$J*89K
zDWtLnyMyUiLX4u4U{f)e-2q?)u__m79)4kcu<|+BU}JUx%pw$2wBz^0V)vhRlq^ft
z)pWec2qJXHu;R@Fs0}=3PCj1sLONAY*bX1Bwyl00tD>EwYRZDMo~ePU9tuiC963&!
z>0qjr?o_{4GO;9dYYA3C&sQaOd8TjVlz7_+2<TOG$?ARtXuCj*De3OFNzk0M%FYz{
zx5osOwwTh2*;G2$tvH&}VXED6dj(Pq$5fLvBdgJzl-lG&Q4l6Zp2qAz+R+M*m7+|2
zG?<{YCUw%b$w8eMa9t8h%Fu2#lU$0)PAyH<6AMx_CPk%*<#d`<&!<dB$!UpY<=AFw
zsfEOHAdYI5q_o~P83ol;Y4WBBF=$65m8H{kPE(n&o>&qa<RVyWNyRWFj9R(8qSM|>
z5oWQ?*`&!=NlD2$nFMn?n=>q%dt2C^3QXyWv<Pkx>jtxdQN^Jqkcr4CjsfE0nMBb9
zI+rC=VaK~slsn-A#9fY*tYVni%exUdcHAmGmm~~*7ndsQ((a{3V;<?}O1iXWN!x*;
zvM#S_xwuNq`JyhZVV5EZJyMsrsmXe{g2Y_n5=VKLGxt$cf=S_Y$azeBc}GPW!{g9e
zzo6>rxLQ5bFD7k3F-Z&ZG;P^zy4p#qcNm70seiV9MoAmsf$WyDxZ9+tS=FX7_dG6w
zX<1gxsl)|n_%weK3EYL0K!DNNsiqSRq@gFO4sN(4)tQQ!YzV({%tS3<3a*l@54LJ+
ziN+8G#GT4soX55y{ZlR`d5#<-Z^XSHLPU=08SvrQv4>R%Dk!kRAv;e=+O2|PBphH%
zRSKp8zOGnO95bWZDMwe~Q>sVlh$qJv4b!P<8ZlLFopvgUh0RE?1?jX~)ug<}zPerz
zpu=>mxDD`x6iSMrXS-sai%mJPx3gwY;H$X3aYfilj%`UX7h#d<7tDhFyN^ElD4qa|
z(wKA8qJ<L8C$`Cjm^gvTHX*SBjcrn-<9uxr64+-DAOVVTO*OQPrWWH42osYjIg%06
z*7|v>m8hOT0Ay&exsgObHs>V0sZ5ULm`qH(`mLcOHqRZoSolSf@~W=KUNo(uG~;qQ
zr!z%8u8Q0$spy!hbt!GKXb-hAa^bp*WZ7CzR`%hRAX!PHSLaGpt(fN8xzaABuTG&D
z`^>hP24hn_1y2;K%=U2NZYbSKA<;qS1zs;y>AYW}fP>h@i@?9X_&f^UcjsS0$yA4u
zVcKp;*#ed&Z%0<&jx4>sR$;FZAV}t3ue=!~tYR1p3CNa*6NfQ<z43bO&B*Juu&V&M
zEf9hO55?iY!S=zk!`lZ>LThDt+YZZze*iQlcnt$D;eC-rEf-0i@#=~JWR|Vw?AL(O
ztSbx##e*OY8Gy8q9S)ri<hq~rD5_q{WWuBZTnbFRXn^ccG6DiSV5N+|sJ$F~+4o}S
zW%<R{%d71D>Wk%<{bB79biUEnO$*d}m|#2V@ByL<l<ZZ^4%O~u$8sRh0MG&dd7dq>
zwLh=CUWfOvrT`2JjAW5&-E4aa#GUa&u7-%sYSBj{EBYvdIuGbA^RFte7Q%$*R3SV%
z_&jh8%PgE6Z$HzUtX@OQ)@QW(Mqv38m;dv#pC9~Z-_JF84MT^)v9!a-65wG34r^FR
zASS*7(+X}84J|HKe7)9H-_Qf6i4O}lL}(XZ@FX%QzbOB9VCL<>t+xX|z8x5MKYSks
zOM|oEVnEd}L>j0bj>C_rG~UWqtx#|p$+-zM%KYSM;sAz0+2AE&LjSilFAAbR0NZ@7
zfNi9Q%>cl+fwg0Ih3Oi)($<=#kap-=)bm6)A~(arWkWkuVudamo{c!xSbX#w%qQIY
z4dw;z{f4I<4r2~tC~OY)TArF9q-{mpw?k)?@Zy+A7j;iJji`G-8-YPcdCOfiI|GfK
z$;M7!V`q#swpZ9&WBW;Cdy>6|A@)Q&dx78jift<UHC@ezHtQ=^I9GpUc9aw=w6H0+
zihYPc(OT|-0!1s{9d(`9?sSYv^MG|Z2Urs+H3}F|^<eX07fCEO4r+heIH-mVF#-cs
z#}N-`uJL%FQGL{Sd<{W@?l-EN2oki;K8MN?x9W<cJ88#qq{t>hWFrxHL_-CLz@sfF
zkw6EHmLZ{l^5-*8oGs55?3V>i$txOD7)g(kI@=(BYTU<~X7pIZ&MCwyS+-Ijwn1px
zCKO<eNOOWp0~<{Wj37bRFbrTp$lx%Bpp`Xpq4h`rs|G6Le*jQ3%yDhJf-4?LHODd%
zwFVK_ri@pbWfIs=50PpduP=*HbfHzpp?CVdmQ~w(+6KMTiq0dxRI#xfAZU)m{9Ho}
zpmn$UT(b&}%A8y_cbI+pf2Sv4(SBJl_G|kiPL~1@IQzBwQa82P$LBZdOE!IegLKin
z5pT+Z!TPCLsBdRAHJt<It^&D&>)~$+)$qU&#w_V{^sK6w@|Z!2Szxvb6Gto+BvV*t
z4mC;IC>Vg)WBCqmS!f_8Tw#mW!Bt$ZupJ@u*EFl5aP~z=7nY$U_Rl%SA98Oat+&U;
z&6&M<krkKr8w%IXqV`mMG~v_up8yhC@T$O13lkhUXbACy;Qt@kn>57FtV`j0g9BZR
zHy(@<B>82(U5DS!(eax&J*$Y2vCx9@GmaP-%fzMkuJEF)25ax^ApTHBh>clJ#PyI@
zl|=UWC$={O+D-vN<T!6iV%}txGqc8)o;6?v@>$b5Co?KYE*ho{PrbnB=IoKZ1|pO8
z?;s}6(9yt;N9F6}tUU=7aOH`k^21-|r;*Nk?x=hNaC(6CdkAoS;NMyt&JTdmY2xW{
z3hxJKQ?77Sotp@$^ANs-Sf4J~&jg3+sUWP*DoWf!lpoIJz0Tzh3!w<gQ$d6t<fju%
z;^ih+9h4iA#pM~D0_2^+snffgq-pkkn3*CU#_pWn`}WuI?>~&O?{~?s6C<aS^RgH`
zH_`ko6OZEqNjs)Jyw!GVb<Ks;DRY7l1o4^03W${Bl6&l(rT1oq;Dc9mh?jee@>T@4
zt3dhj$=;H|QQ#0d3obU!ea^3gm^SNxX>*|s&DT8tav!&WinsWA*%faK@I&TQ*{|C!
z8c(wb+iqk4%D8bl>muX`@EU?hHDbQDl6-IooegHcVj;m1&i-8CuS6i^02Iz5N7*?G
zg2Tzjm+`V7<slXx^tGWJ0~Du<&^E{zURmaJA^nYcvG4i}XX4@Ga3q^?#I#u^E<R4B
ziEguuyx|YMzt|VnpUPtZ#gT=(0kvXRQ+re@Hdvzv=mKf_`5mrNSf0Tyk8I<f`d>K;
zF`5T|7Zv+=A?~S<cKI~WB_lz|Q{e7C><5#zil8-s@v%kPXvC{Xu?*;~{fE~HP!Ixm
z%`mGAm6%zCs*d0AJ&@tF0aNvN^#?XEwcuESQj!H!8_OsV1r3Unw06d^Yi@F27;>?J
zi*(wYmE^ulvR(5~Y}g;HMn%BZAZ876a*ML_q6R|K3?lFh;?fA18V}-T57B4<_eL6!
zrt0JCo<>-j8^kraeVij4q-MRbHpB_KIYG%J7Fjt1F*C?<Z4j+y7R&X!T;G)#a5Hxg
zR)L`8@D`Al0DQ~_z{>dM6wog2A$$DL;UD=t0&tlDPP)Af$TO@+>->?N*)GD&VEO%W
z^>}sYWaH_{)9H85rruRC^4Bz*-#yzqxfPFYn+dP1f#j{V_dh-2NHgpbC;j{qe|ZEY
z-Bzbvl%~-5x>GSNn<fofXLY6qrf9Kfoef%l_OJi;FQ1Z+i$!KP;p2vZE_Lt>@EYg_
z;Lx)iOqfH@PJ#s=mC%A4x30WFAC7Ii;B6*2EModGu*K4}f-ssWHY~=8g;F2uJ&K-T
zW?={|gt$bS0=yarkNpe=HY}#+AKmbFfyE}qq}3+#L9{M69tFatbIWN04Vh7H>3CL+
zr%0{fe-mM$CeeDp2vxzl2<-q^4ZH(K{XZeR=fY9{8ZrUy91YAJ4FpE-K=Bvd7uq-1
z(N`jK8}QnvcfVz-Gx_xvDAxbIQ~hu+aQ<-3dBW9CM>($Kf{RVLf<H~PLqUcGctlqo
zth)oxabw5PpxkybV*~Ngjiwj@oygR}>BiD104Pe61PDo$ZxF0-y)oa1irO$D99Fpc
z78U^!3$05DpA0SwvseM$0JR)padPblu+xxfxe4nRTGMX!5%h=^7lCd8Rf%aAnJyZ#
z{sbQN>>wT$r@imM_Grjv@r1911MlMrJ&g+727(a+u3Kn_#Bs{|bo}HNJL^s=qsLFi
zA~>CSrC)}xiAuX;7dw)fB5MOIJ~*5VT`LiG=4D9x`5>VQcvL<e&wtpj>Cb;?`u#YX
zIOP+mzC^lV)3)sWJ4%Lq!>h`=|5RNezgN_pLUQ$4r&G<6ZczO74CKd?wyj>${r9Dg
z1B6)I7B<{F2lyBj>N6Grwkx$468j|`fS>A?QefHzxc%CW3j!Dwv;d5R{0dfmUXe!t
zulg{%E1sW^I{|08hi=Pt;?+3BH;Hl*p|nx%7-Zl38w-!6y?l!<OK!jSnY5l&W!EB~
zB~gzyNq3rii&aAXXC5B{$D#mN&(tqucY=GNNW0QTj>=;~=|wPW5`0b`^*<HFy75SJ
z(Wg~&p{6T7XIj%0wunI1==*Y^t^toa;0SJYykXsbtat-~t7Yyzy2I{$b9#T5oc1rW
z^BP=Vf4C*Cemef+ueYuPOYUyp>D|rK!SBh3h05vO>!<eyKa7ri7@dtnnp;C`>Kf;$
zZEP-kt}5;?<467ip7)y`=JYl6oOxQno{K4cj&qB|Q6u6YN_*>N%Udt}(*pjn&Yjwv
zj+#5o);uQ{iKv(0>m)~x=JeK+(k`$nI;rM_Lkq#1IaKtbhxfe5k)H9V$Bm_d#^NlA
zuJUl-*_g|w%Q<%C-CTpH{D8Y^7O|ICi3jn_NpWXUSsLQt&Qq?Z>tXZPD7qE#oL$3+
z4L#0XXv^SJtH=?tfGYMM3NsITNV@m=9YF4P_IH6!x70jayHv9OgK1=p9@WZI>62CH
zZ(kPJ8KiePf)bbd)j7pI0I6!hHkpy)CO}OO+!kvGHQ-zYaNNDa0dR;h9f`v<qsO8}
z6Fhh<-4ho;8K9Cm>hTtLIB|~Kc6D*O1tz&kggf1YQ;wu)x+utqjSN>G$NyY;G5k>1
zxHgJTqZxD>O)v$N{11Cq_tZ9)<^L7cyv*!_AExJdx2kJ)r*>+$XQpa)w)R!Fg{@#)
zUP(5AC%z$p4B1Xh5<;4$F<?Fd#>C)!01s@x!(+dBh7i&n@;~f7=iI9gBO_gruIZ`T
zs+5DId#<jetE=<zJLiW7*3ri_g}WSY5}U)ax-8*Hwy+a$OvP|S?VQfCCkhTGe5ws3
z&BbTwD8SNBk!!<mTZ0#`M=xH_oVQx^LA{t8Rj%sDaePbAcXO0=?CaI5ME_)*N=Z{?
zeHhWaN>}9)1r^lEQpA)BPX}hU-_GWf#%*ayD{I(&S}yk~99rxV^x8%-(5DragAv{+
z|FeJk%iY<(OkT@fi76MG-`)Lg{JY8T#v=L7!ty1;8x<loa=mL|X$+x_Qfib&k$UQn
z&?p_{F7WJu@ODv#rIjKyLiGQ3_>%_Ae<a6XxGyjlrBRfzgOwHDD9V_!kxm!oF$0)B
zyiv}G(#np|=vI_j+#9J;lsB+9%D@589@+>{=_e1M<p0UF`27GUW+eY)7<4<lkrkzt
z5#@ZOi$B6B^AyvI!+eoZJ&=)Hj-idLD6f$fWmo`cj_%A1b5({n%0_7v<pF_u7v3oH
zXR#dRj(UBdQQe+l7~9__t~T@Pbfoq%lSWmtdM2CeSM-8i(xILa5{82Iru0C`k~BNU
zRS`uily%c6fO|+1aVz3rcOZ%e;jcLXsNErO=eS^ftO6_+xFGihgZvR8b6uiQKOZOo
zZ(RiLAi-@nG1j)}Ku1}_c?GB_A>c+J(h7FJPBfE9I*Ox#cw7t|tGGmVqK5BEJFvfp
z7)2zm%zc3dK2!uibq{~{9R|!9JTM%Lbj8uu1(FzV)3TuA^k4T39Sy$x85|P9Zdw0t
z5D8nuIH$wmDUXPy8*)o?rWTNHstRPql~r5IS-t&A@!KcI&u?k@!tolSS`2LD@s>;a
z$;G_3kz7-;w5;iw*+8n9)CwhgKr?$xjco#;R#X0&=6B*x`HYq<6*7|Eb2_Pp#Fp&U
zFw6q_5wJbm(hWt;*(y3!oxvDNztVHOm?8#DW<qGUR8!8YE>3IHdGXaOpzjvCOqNVd
zErDaBte15)2ke?zW)V>XK8zkoH@k3{&nUop3C{3M_San`!<=hx3@O47|FXS4k3iar
zkBrmnf#2gOT=2D51rT>*620~J$aAj%-R8!aOXA5{JJ$sqmpkka3HM7JoUw4YK9@Uz
z=r#y2yDof@<Stwn6m}IK0p}e&LY{f<$fx65?d3@a;ES-nPH+DrdL;!%9x&J4AxJOz
zJFEEHBpjataKGhAN!LF;wc{Gh6E&aIaUzN}y+>tpkZr2q-7<j$Ghnhx#sawGe`x#r
zXJ4JST4zHW7f)ZjU)jBw8wF?o$O^^*_7G-U5nPyekPGt-P-%`#2nNkb3=urK__BWf
z%?RRc*UyIU#)RAox4+N9KPK?5+qk!?2o!7sX)}G{gfRiDU3L!$hD!=H5H5FxxHlDn
z>#enSCkTkEctE>VYzyR>#IIQwKwTM3o1=$cJ{xqy49IkO=irCkulc^qukI_CFXul#
zd3gEt(B-q1^5I?`aJoaAA4mR2`S|4V4~>KPfrlpYSk?(4JBHhv?dF6CN6d@x!8RNU
ziaJyT(Do38kA#N5;NWw8r)agC(}2Tmj<jE__<P>!;J#!Mg95_4?cv&VTdZTG#5uem
zq9#kuxqUW=WH5uzIe2X+D4JIlaf)Gj|L1Z0D}2pswC0RsOyj5{=1~#AUT0~kuHeQa
zCqyE-mW<Q5B!eCzb}{B4aknty;V@VpI-!piL_9X<;U66P3Xj$3z!vUW_mXV7yqf-)
z2@kF~u;Ib><`W{`?1TwBC)^@#%|#5D2$oJrqg^Kvfi#B0a_XKJXW~XVHWA-D)UH(t
z={ra)nl;?_92qwUeZ-nh)MW|-Efox))KM4!$yH|;2`(a|Cb4l63Fo?lNUjR@PS)X4
zT%a*s67nOnML*Sli>Xh5G#==`r49gra!$nnjNNPW6@c>54ZIjiv6L(7c1b?;!aq%G
zfHn4fnw+nbn%6VOi)9V9q>@!SUIcaOrr#*X8dm-~_)s}%og4r|;Yka<`3-bMG!@`6
zJRQWkE)<n%63HfnawgcW-IGHFfv1Njucctfeh`l@408m~VTiCa@NZ}Tng9^5b)~+%
zF1|y|n7h1B(T@W}-a@qU0-}}Ys5kcCFp_RIco<2XNjjX<H8L{+{FHUJeAKJBK@64^
zC)~sKjuY$%5y(S`D!xmi0R)&$jO;A(hWuECPT+)sD#(P{z>!R{oeD~f3)pi-;Fk}P
zF7qBCoa-DI4S&ci|Bf$#X4yVWCLm{pi8KPx_Cuu4M0QMM!aRTiLd1o6I!kl%^pQI!
zu?xV|v*vGDgV)dzNV7jrrx8NNdnbT%^W+5&Ox-zY%15DiWa#ns+t%RQ);g(jpvSEa
zzimMf>fPl2`OMV$@&S-sE>3^{?3;@}Z-bf^BP`{d?0j(QeCy@A#}D4N?!LP-#m>Yx
zMI4Nui4?-0N~lUrnzkFTkqjECcET#^pCwegH@0lbFeB1hc!kSqR+kU2sYX&wN{gjb
zQ%@SDxN?b%-TPT0Z=_2(`3v>*@vGQ<WVOiDAzm?~V4FrRk%EtvDJ`C6@;%a0TE3{-
z*?xIBBO@<U)(a{5`$A&zW3)ZYXicyin~QN%J+e0$fT{JVIre1P6Vm1H8<V%}q+!_d
zl0}m#J?TVI%}dK>(lKPTv@So&OioWG3Yy(#n7w*I{{B!9Jjq%>bi9z3e?q*6TuHOp
zZO+D(H*RZj&z(|psZtJh#qA)cT0qQbY3x6v5W5#6)f@X7jGSR6ph#sV#;Yd!DA;N$
zYH`ae;%PmFDMP0GiY7X+dJ~8(wqti;q)K3L#JvV6&Y39dT3_s&n9*z-$hs|CHEk^o
zBZ@m1ojs6*oz?R4>vm@c38aX=`9Km@e?DpC^i<rQ3d{&pbL>sAOY$lh^}$>8##!w;
zI!Kg?>`?4yZ6Tclpo!vkaH9o-ODAsMv>TkUz?Z1y<M)yc1XM+nHF|8IMU5+tS;C22
z47JGD0lRoT{zt%uEbW3C_a5Nug`9lsfEh7K^J*`B8dQKcjJU%9BkLw>7CS$?k{I`}
zl*+1RUX8IRlzXg%+7n|%BEh(4G*el<9AnCYa8D_Qje?RHLyaj|WXqKCSv=vq9rG?3
zN$qw|TwyCOZ@=E`1LKoUWJ>a8H&drmY9Xnari!-kO*{BioDPbnal4-_X9Kt7Ut=b2
zTIEcvfm>=pOJs|xE{_HV`kho9y)SRDcL|q0`D=}BRSLlbGQ-DS`JHYot&q0mZ`$w3
zZu^3?{scTa&FWJNi3}k>Wrvdo6QH1>8lM1jGRDWyFjMj?)F0^BzcGyrQa{-NAn;?_
zII}2k(%DTsNkt<E8e+U#X=>@LYA4iGim7I?77~1yz^-Of14yXZx5l)96@fh<Q+~!;
znwmAt7+)Rsaxud`nbVk>8$-{vN>&W#rEM^MES^++S5c{801QW3uFT&3Obq3f$;rR+
zx6>J}qv>oqWMcTD`?Xxe52ITGM9rxwFlS}u&@U>rl6Yjx88fN_+m%9g4QVylL-~1w
z5u*n#QOuPxdO@CR)U~1#Nz>?qXG~qi=t=%biS<G9^$F;}VtRaJPojq*Mb9Vx>zBWj
zE>4pvX;#3pB$dcxs>Q6HVtSvNW*>5#H&=_)h(^li(1`U))OgB&Q0wcHZrUr40G>&8
zW>+>mMP(X@YXvavJuG`##becO_*u3&#XG~=wd<nWJ?y!KfKZMxWgA27m3c(8Hh5Lm
zXhm_H0k3on*r|@wW!Qvg4T8W1k9?);$k~Cfu*Ezcz7~Vep}l`sV@Bw$srKlp-s3f;
zaJ<MSlyQsd23bY(c(HtK9co0;g1809SpSr?5}|Vv@UZ_1Hbx$>9IB;c*csVaRLrGy
zyG!ZEyP<p`zlRmWf~pbzH-p}*|1MpdgG6N^9*J5Ghwb$ZLZr6W8-!Cu&?+2gwAX76
zbaj;~K7>~w8mBb$c6C-DMR|dSJu0+F6&W95miillXkDA-14UH}>>AO}Ou5H_*PRN>
z6@rI^g&y)k8fragAP!F=EQ{!DxPCUW2FT3erL*CUvyqL9t;LJ`<7XqIh}hf+OCD-B
z<`BwQ6;%@s#ot@)MkTO9=@M(i4%|T?WCewmBG*L10!WWPK0NrLRsT?bto-m|@WaZc
z@?mE5hvsI~l%hI<{x&>SJzIqS8*9H<#O*B(Omqu{E^tYJI_?w5@vnpT9BgAnlwpvq
zs|1ifK9>-P%<;yJz)4miR58^sV7peS<Zn2&#CUtNf%vysMATJ0<p?(S5LDMfaNZz6
z>FywIj*4_H3C!QMWv85Mr#b@EHh9T^8oLfCAy5a@92C)9rRtdrSS6qlniGYy7$9i{
z)g1WWC+PuQ^}HyET#Jx0^&4EIXpQo&a%JzD6<WpV0ahCvMd(fQbm-(2;lzdre#6UF
z))g#V_y&Mx9Pl&f*1DwHPhk`bv?TYc9ZQ16jJIC9KNFT`h4ldHi14Zas0-^z))4(w
zM~qn0sZUv<N|w0mNV=R<9l=u@0)5pW6crVTba)$Hw(Cm-@@ir8Rcx_^=&ML|ruc0?
zodv`bRDIk>7PiBaos$;7*@&Q;rJL=+SA`Xp!s3ok56=(2p+)a}fBAfDKSEm8`TQ&s
zI-j2HD(B0;a+E$!Y|ydNqo%0z7^Z@eSElyDAN`7I6}6OYK`ARy5LTM|Ku{b6vml7%
zsh1+xiXX3{wpSskEl-{P2Bcg;y9Vhyeti_uu@-_))uD;fv1MK&5%izK5LHu}G$D{f
zz{#Pa{Hy2qOF2(9O5I}T+9M6G?qPmt&=^T}UWjaWB`WZifLz49uOp%5IZf}0oUT3W
zj}I(d`3zJMCG*$*|K@Z}WV>FQR`LZStKzHKYk#{<BnNAG88ozC=_YOmHAwYV21=&#
zpPJdH=Ijz%6w11m(@f=aJhz5PZx)xAsZ(=BYxn7%sH?*+ULo4~Kr$t@sG8nylBtxL
zk<RwhLPc6EROly3$+~gf^#8PnNy4!Op!}Yx<WwcY>~?I$Gz24`G_}%gXWdNy90tQ>
z<J6eD$_)WoYLDCNOY|2VT*ih~{H-~>5?*@)4qwYdah>7f8$<+K=2Os85Qju{l2YPc
zqU|&69-<0cGfFJ8NRj|jK&`)e;!n0#u=Jc}34;jVM(-pnFdND=EbYoP?3bN}$VbR+
zGOQ5GXBVU>xV#i(S?R!s;hBLNwC6+7aVR$R;l2<zp{v+Bug;wf&A=Z+FTQ{F&Drpm
z+)%^vduKxnXTwA6=v8|Tv*_Heu9$anW%zvQbX`d;6twdi*n;QXF{wRn<ncOuH9FRX
z!9t|keQN7cs-U0CZ}C{Ue6;ytVN<zmEq+*gartbNTb?2TZ+`S+WnOWtQ^k?(v{2=;
z?OX<oD5Mp1Xq*z=*!lrS>i$-p8F3X}L69fbby~03qB(h`9VGS5u~Uiif!kdfH$3O+
zW_(w0Sp0N>*Et;1HvbXo6$a9G%sCl<HCzrWv=&Y}D`!(A0yag;-ZIbj1MzAwdk6l0
zO)G0xCqWB0sLuuB3F*2J`<w<hW&ojTg~Z7*K|X~QEP1$(#}{}_L#TDQ#K}TD>3DPK
zQ`iM*WB|FA5ya!Zz)qDwJ`xF!#+Ay8@>LdnRS$i%A}f^j=Svf8i3Gn1hfibyx2KRw
zgcnrgq$0s?l+M$1cdN)J1cvxoZs~$WBM()t2cXeddO$6(W_?U8HS$6yW?Ch08Y4r{
z>!<N)rN^tsONLJ?MO_p^HHDPUlz@Cmw>ve_d<;$W8b^=6-rvca(<$Y2geWLZ8jzsH
z3w!&BPYzC}usAkkfU!T0kou$v`Ce2tJ2v+SCJ!F9w_51YS@7o-t~qIfY~Fxxry{&T
zyMf-IikDbK3G^RzoQ)?U%6OGN2}ud20N%yyy?aDZvW_`@6(^@pfmFkt5$`T?H2nf(
z!2?A-dgT`I7xw={paaWGX5uoQT;~J}9=oJ#OqMg{il^+TqeT133#djK;4~xl6rK(X
z$*yd^HvPUdg>BBB>|=H;_I1RBsN*JefxCUPUuJr9e;g2?y!ta~sl>N(et-0BYyI8q
zOGpKr8|4l=Jmhu1#mPi;Pt)9C3buQW`PuW48SwAnp-88Rn1;yAi}+cHDWj0qdC*#6
zyGy#MS^xMmjy$I1Bm{Oa6F*?51SHpk+9=ss-brMjNGBjcJ}0BI2i_nuT;G*1LZU%G
z<V>Q2Av%o8i%LKjE!<1XhzbJpiA#f@Jkq}B*qbiprGYmcUFfM^%qzKBH#nnYyXb=n
zn^n!;Ziu)Th>LC@3rqR6sofl1aj(77Qn-JTlm1d+ucd(6W0BAnKMtb88RSaGASVJG
zvypHLpBg{$Hy@$jziXGj-8O--*Y)&<VI^F*?7x`oUC8CoEz2et_SzfHy42}+O|cp3
zcw8ezBE7CskUw7gBrpls!=BIjCi2}<jV*`%mi-Rj95PWuLzSx9Y*8)$8$l+(4+!_q
z6rREBZ=4q_0b*X5gf0b>7#V?bRp3yIjFEWc@n3DA%c_EVz-y-9fjRVLeF9v><=v`s
zIr8jsbmvonB3e+@3AX1&xuhT_)Gj`s9|avD7#+IKs+*%DYUmdr+ERi)EOmMj;=&;|
z+qET8#b<#k{<P4DUkQUl{LVGb0kc1#f&GnBxcAyA9Bf`Q6_y#+sv!Y^SnR+$U>trQ
zh#I-^OU4G+&R{i120CzLgVJ-nXDX^~D{N=+TJTaZ(7`b_nH74+V}%}it=mrtdJuLa
zF=sNXb6*&ggQ7IeQheycI5WHisNoHF8#q=Re+Y;8NXSGJcb^$*A$Q-8et5X>Zt}h)
z`r&=;`TN>r2=w8PJSP*;4Jo0HLDAW~TF{G-FAqD69>rpxw49>nnd&DK%p}F_DaEXV
z)ihr@SyB8I^ozQvL0gNf$;~0^fg;veO_nv1$0>A-ex-Bt54<7wO>w;H4UET9oy5>r
z9<)~%JaKf{Yv5|%-}hx8!<(3`pjy~v!%UVGRXKiDG}UrR0rpbR9D(l5NU|k!p!eHt
z$+*9Bh7KJPiq(GMeIr1r{3E;)Nj;a=Oj(XM_e#J`>S~$&&U@!*oqGOwF{1&Lphv$Y
z&Gz>1Muw?RTFwc`yBeux!?y1|Pyd`=aCWNOF=*eg&@2|7oxu9B6SzI)tvGuqiq7&d
zyIYR7Q?u=i!MCud4H0T@V)ru9YSMbW`p7&6cbZHed5VFbBT+Th6HD)^6%%V5FF@P$
zl!L4V1>##+{c%G?ftsjzZ_9_<eXjs%*P8PTO<bQ5cB1F=2Ai5PunLT>JKbx1wg^R>
zpT)@WZKkEObHLejrAITR034vPfB)>syXWl7_WFDTqULW8=GiCiY<TqT(bl`?OJ~Dh
zzF(X^8yfcn&JVCY?c~|;%GvNL5ZT42s^ev2kEkweM+iG023{A9+_?8kr{t_dOh^4z
zJjG)hB7{!GWv|q!tx}m=kMRLvcv)JzuPc|Ug9?<B9T|ea_lMQ#4-cwPZ}!7f^}|xl
z6aT&(pZ@r8^yA&u<<r{b><j7QvJ}+aLnC31LfCV-t13*CfB<#X4MKx)vJ=fj8U6&J
z?al(UEsQd6_+r;E%4w`vi#_6b;>4Xrgcp_+A2_^<ivX=1!x)Md)Og)i7g&)tv7l^2
zRGJ-hD$NG2Bx7<biJ;2^<7t@;DQ|eTio2d-sQ0KF3tq4R=2neuIdx$viX2cKc7w;E
zpYNb?C#MjQ1xL}=BMkL|?KX)7cN6Wkr(Q$u4%K3H3%<56uDeRRc~G~-EkWq8iClVP
zU>?ebYJbmGW@hr`jXt(sY7mWeO+j7B8|i*#4dOu+)D(5#c(+eJ65FXj=sX1Lehi$D
zEPm1p6p@vqi*>k>{ZUoNzUj0rTTrKU$0^J@3`P!AblrD$JEVFNqX0wn4|GpE>`%pD
zsvhh*t`^elHdwAgCLOn(5cOKuItsZiw6~s+Y3f90yIQWn)nF;+yMx=4SQ*xd!@AqM
zqQk1_qv9ztyFKoflLcZwI(^KjEsuE~Il<5gG<w<Zyi<P_)<mq+dc|<=5^U`(0b4~2
zn?k8`cwmM9!k(25=@vKIxowU_fiR$Yj3{d-?{#-`973=(Ga*^w6-GxP@DElzmTyHG
z!H(H`IvpoyiU$0b)9)VKW10@>K49CA%v{WkzCYN0`+6@{eDq=M?cw<O>sJ>WQ=Z=E
z-;GV3AMEhnUoU*W@$e^8oi(FJjgRrckVt~bt5WsN{NmZj`S7k>$<_*3-cc$=+mY7`
zy;e#sMhr(fVlgVLII^FjoP@u#9@8k;TI7n4j__@ij{dN{)dWnnPgdx+@QwR|StBXw
z2zWuWi^+TfzS;p7DHZR8B=rP*Z0)NtPdKK0&X0u!I>$uX@-jJWTEfS*&|axTjqy3$
zLY%p1-b)}97KX!->S$$_b<-}XIeow=t3VBuH7r00gk&wN>{ehW;XyZQhA!`H>3Gd5
zYU!{D8{J4nFO+qgNrcDLw!k~FG2xKX18)w};?%f9Axgw-OepiqL<rHL;*&DJ%6`M*
zTy(_>uWAq$+G=1diy6K(q-tq_`3od1li_hKBvb)obeZ^0w%UzV4=9{)+0-Ix9IG)6
zxaqqs<?_*e;7b1*%*iM*tPadDhOTBjgd{s?tW_jJPoC$oJV@}~c{GQH(GY_mRCsJ@
zOxQci#Glaulq}gsz<I$_Yng!(9!^?suiX<7B|?@ik-0GgEZzty4T~V*%;;A##<x#X
z`n3TTZU}f@o?ipKR2o>Z!&7CM9z-Cp#_1GbC+jD>6u&sV_C0W-I!8hWt@&V$2sI>)
zl}M?Z0(}@ciZezm(Czv%@deBXwRBkE03(HF?u!co4RDt(_jKAJhxWqYBE(_dKbd<s
zweW8K)!Wxg7x%}b1Yx$mIe+zk?0s8P99OpHU&-hfpA!M2=<b=AGY`{e+-*<WF~{AG
z_H>_!^Kv4oN<oUc;JSdt6ZeY%Nw!oV5Hc#OfD*SV6)GqsR6LaRsunX3_IV=)fsjGb
ze_{4oYww*q>zcbjJ?fc>R^!Uc-g(=(bFKBQ^?l2<J83!B1Y!7O#%wFU(bHjtj7U5b
zvMK6pML-pheX_vp3hTHv<@Kc$q94T=KcOY-V$EH|TC!3bk19+l{y#%BC39F(I#EHS
zLGTMm1g6TrQ|G7-<sY#dLKknZ;{2D?^PgAM_l9xpEmHC$2|aQCq6QMri^TP+u!4w-
zDgR_@!xg`C3KYZf=f%=GeLuUN&)UvM)%4j$R6OC{a73$y7{0VnfXECqi+xY{_tt2$
zY~#at^m1vpBSPp!jEy6xM+_RDG&7deuQa9!_QEV<fRNo4%NLKCk*H;`1II%~huuW~
z$|oJWkFsyN5eP@9Mv!&=Rp}u+3^a;{<EZT?-U%zNb0CRR6j7_rfg%vHxW^U^VJO@`
zSlKkNkT1t!39Kp1;&v`|M>|ij1w%;Xkpf<}QeuzG>Xlm^+5pB5h6zUn_&~4)rRXTt
zfy2CLkjEBhsD?!vEakd)I|zHEscl{q&WzTxaWs(yZcxUfCY0U9BPJL?fe)-G8$X07
z_`)Ie!D<a4G?WH!91f%o2fxOopPTR-W$oS!PrWKk9QH52PVXKKPEkSL(f!9qH^&b9
zS4qo+SLsz)SWROLt~$J(gUvCnDo1s2uvic{L7EEilDLpT&|nIS^Y#dv$RRM0;UEQ|
z2iMS4n<9fvq6$*?t{~btKtp4@PZM-_o4CV_zqeDnr`g+R1@?hCI5cmZO{CsVk3)GK
zSGzmCdFS-n?J{ez*&@3g1O%vRY(>WEF?v6SiBOM35nXwKSzg&&t+&ev973WI8@zXb
z3LFqaRmhvE(UM~j4eVNNWTEnnFX9`Yt@)nM@i!RuO`%0hFu^9-$*Pc@TF^F+v%n1P
zbOVp!Skl_?!VOB)+7Us2sw`U<i}PxE8kUK1Fkwdg2GP1X$ozs9;{vJR<!2;Xw;*?P
zi_}4tTdJNh-vrb!kA-6sR0y_%M*f&6^2&=vw$rYVjspnczAPP^6Ln-a4BV|q;6qC8
zAVug#<qiX~R?YDsv;hh3Qh;7uj~N1E#P92c46|O_d_I6U1Y2c<Og}R}eBTf2gAvtC
zgRl{`m@&gX-Dn`dx@fo!ioTo)UK5G9>G&uuhJrF=_pk4d^Aa#-;6JWX_624>xB&&W
z2gq^d2$g`H&QamDgYo+nA@e56tVp_zEh?p6iGxfZXt?3RC<gr#!Y`5?EV!YE%mh}A
z^*0cdtJOhGA50rE*g=7|6{Rw251cYwG)UMD-LoZ9aZ3y7H-Z{c_&d#|=TOTH|Mt^v
zhRfv>sdsGcI36;CVs$eP?2ug(-KU!X2NbuEjiGU<q{?9~q||={7jb`(4Jqa>vgI34
zr8NzMY7}UV3=S3)$+4tORf@4*Zn7<WtYKM}?pPBEfb(hPg)v?g)=&0so+KxaXSSpY
z8#$TH;0G1GPaG|5L+`T#$K!ov2Oux@y#W&Fyt$l(J`XT*dUpo}4X9@8FRfs_Rf+dr
zMBv!Zh0p5+kU!#5oXrN3t4q$;blaN>HP@82Z#Dp3Rf`_O(9biT;?VrAH4%@-aZa-F
z`q61e`2AdQ>YA$S=MP}llqf!5WZRaKvB1ls8Rf3f8^4IScXgfbi;RZ5G`ue&G+rr%
zT}CG2%51vWKzhw_QSKDzhu5o%K);Kidv%p&?NB_IQ*JIK|B-wToalVR;Jjkz%Fs1L
z>UrsR9pU4P2;EmvBEHywegl!;Cgc6da_wD6?#R9y5P)QFHK71iEBAb`0Q22UCxFbh
z^4azr;J~`T1nxrBE7eNyfH}t3;gg-L^=${FU;s-uM&AphV4VX5aNIep;>A!ur$UsF
zLCx!dGozwkr`+T{!VjWli2SplV<81q6X_iw3CwtxM~~W81aO6qv`1eJIKdZZ1~$YE
z1ga1+eBodig2IX&<{&429hrFzh=E-<J9f+f9z$-x$!jh>)J7Zf2nS37lt2K%YCqOJ
z<Q&)m)+fo-8?9>UrLd1atOOo)qY{8S^byuDjSKGzc&KR;)&mxpwXjCn%&CLLNs3v@
zVu?B2c^D&=6yco5^~riB$Q(fv?*M)XTGDQngRGZ1e404KtEU^sgPTW_(}(>tP`*%U
z(DtWs)6!oPkdXH6oWKlnRNKtV;ic*pGTOxmBfQs8#8272v%&tiv$tI~_}TpKX;Q!u
zZxi>8)5PSvKqT^Lj!S4^j+aK{5tINRVoX@~$y#<j)ojD!`w3X+;uY%qJtd|D&A0qL
z#x+I+h{4>IzM38l?Xvc6fQ)m37b6Ghb##adyzHHOP<up$SKV>SqvJI3{(~7;%+9dU
zBTXdS?fKnE*dOk(qn%++DbX9!saPuqI-)f}4zyS*J|nP6xA4N{27<%V$9@hnpN5>A
zhR)zQfDI}*$e^E>7Df13wb;Q8IsoTfb=OfXclcX_J@QWd0{qPgaAFKS)F}yHG;nD1
z*l0urM&^{i^?;G;zHmcqWaNFoM)n5{EVbAIK00v5bNkcE5JpUd1181&863BY13$7q
ztq+#GdN%*6@PxbR=x$#mSn}%W;>qmY<6CRYv{Rk=za7ZLg?R|P;T>Qf4LXQ+**>ea
z+kXMbNrzdElYC;gcNiabn|8o#<9v3WK@#FZNGU91;Kz?+%3)5p`OA$dz^Zjfd|N2L
zq1Pdga9m)Y5L@`u)o8f0lr-3PDHiVvh3iM?Av*9$*w@i*MJ|_i+uO`Yr)Ae|fi8X^
zW*UAw+7%Aj@GSghL*(QWGaj?lHnGyf#-PbHPPvbM@Ho1@IUp1OS9h5l;Kta<xUg5}
z3|v@2LvsyREmQSvBk)6O?18JO)xd>p80c873r7l^wuV^6=6wTvrMZV^LysDx5|C;|
ztloLytK<$Iui=kP{43eN>!L#B6P)wzN-SYn@=Fs&AgQj4(tsuos2sPXp+$;qZVV)h
zV-YbsKPVSo0RtD7*RV_&4BJL{!9T+*W#Sw4{VaQ>p5OhxK77G#%F4H-E=L^NhTrT@
zVEHXDAMdbFE!VHY@<b?jq)7MR%RT(M!k4=t*V8AZt#$(nXxmGvP<w!lOsno2PB!DP
z01y(=03)*E1<v+F23TeQ3u*Vzo&)2~BWyr+7Z5Y;5u5NZPc#7|X$`Hc1uy8pH(<Zu
z{mPcz9?q~k=^_6AX7$d|jSTy~ANSRrU%9<db9dDg_%w7mVghNNFBa!COdagj2d)|>
zO$!0b!0*0~xgG2idWr1-1f7=Oh_bIK`$}p@{WU*#81Dw^q(=l5aIU6H#K&Y{Xn*ox
zX|V<;(=rd358yF~XmYzcIp>H|e6Tc#l?O#kTu6g(l|F5g<`+|CKf((Z*LMSd4P3$=
z7wcTMN`xe;5%9_##?2i?S}RwbR!N>j0=v3Tw3&dYG=+@(KYbU#(vfdiCws$8(u(qz
z)&o|EnLni`f$;O9{X7}_{w|2P1>(<(ZC3$efyq1^SL~zGx+a|BI<Ud)rif8#Lq+|z
z6^h0{5Qn|nQ0~)i1uZ)ig(-#E$MIM%U@Tu~zw4H`bW6-GCUHg3Y=W{-?8_6xLPYWT
z0U4jGm%?mI(DYd$^9qK=S+kg}4YricY{)+)XEI2psbcCGR_sz3n7qR10Kf^y%=RWQ
z7eY?vq$X6kQAm7JqoVfXmcvAK34bk04ywFJ$dRH>8C-N^Yb9OlPB~@OsK8rR6-HMf
z7RA>jI5gA*m6gk;${s8ZQ0TNkg&XBY7T+SGq_>FW3W%&ONje&$<-A7=>lh_X67y>f
zaj9k63OmE$Ax8ThuIi^?ttLt%$4rdItHp;*uU%!ZvLRzwY_Rh0pn>G;kjSWs*Hjqc
z#^FF7Ye;S(a5Ho?{OEXoT%;$haYP0vsZt2aB!GIBZ+(bBPygZI16_bdrYyNY7uCqm
z3oRv%q!bP|N3oh@L_#MnVya8n>7wMT8c;ACfPA*F{WK@C6~a)&Wwjz0ur>}h*<wx?
zli9p4T@B-GX#R9(2zYN#@7z4ScKZ@=;V!MAKeCxWo4;>B`N+46XCsrRi@TDl>R#dP
z?V;0qyOdycy0Ct>c~cjs(Q#skNmnf?=|kwog@v>bf-@_0kr2UgLn&psTod?rlsJ0X
z=IaVq)gC{ua$LsA>l&46NrJbEXn@_iO1$wx^Xyf1{WF>w2#Fz~q#Yq@G%AUH^Oq39
z*~3{QPgBU2B&U*ykEF?(z_o}5Rd8HU3HPePEG(PRngqv}nqa+Z&WR)lh6+^Hv3a4A
z7^|G(Y8XKrw&MWyJ}0%5I7`+g++|UPf_4OQG(+IhI;bJRL6iLK^r4?T`mxJZ{8}Qy
z8WrU$d5PX|qO402x?$_KBqf%?X}16j%1W{=&Z705z<TE3hc3KRW~})GrqP3zm~^YJ
z`Z|nw^!Y%%-LNkO;%!Dxw5zoFe8AreGDjin3Nspl_V=?s+X$M`aF2CGALeoFrZTgs
zv|Pn(Dy?^0b`(BEal?!R&0Z^n+r!d!3Uuh@s-TO1hxw*MNEa*^hM$1X3Tq&7?I~Na
z_3{z>36XUt8nEj`1D;XUi|X!XB}vz~z>9_vLV_BZr_4zn*TRh;ZcuLKZbgjPl~M|f
z+K3TkeriXRzydILU9tiom+H3yrr!vd?GZcFu8mo_;}43!QAG%%S}X!}lK`s9mvK{9
zA@aI}2g~8woQhB-cuHk8M2vdhz&%!!a}ktaZl14kIzIg6Tw$Wij1HJ1<0s2P^k@K4
zqGHYX3F1l<NV?@9QpEf<DgdkslCMj+dKAlkpzC$)Zbl$sd0gC}5RTMV%F#Bg8UmfD
ztfT-N#wOHuk>h9Cv@V7<;_a<@xlNF&^45TQQq^~cN6_}C4X8DPUiJh~1?&3vhj~J&
z-n-+{Q-WC0lO-N5nx8tlmOT1q@-Mq%$79nceZzm*9cMP^t0${R>vxU@0A4y;c=9^g
zcd}7Do_^L0mZip~;CK_WCu5_R*wOfLhH~ssClBtuezw7mcKq$u@$y6i7>k6H{t75o
zpgZ`BR$m|v6O1<Y#I{*Mv(sx!$!B%h3i&Kvn$v0q0-DX)hk(s)nvLYSf-aP9TYVuZ
z^}e=7V{p&lgL+<oF5(BC%GMxr*&u-v?m`)2$9FM4le-vyL@g@Y@=)R=1C3Ld&dX`u
z1(m#vNkNp!i9Vi7<%dVsWRMFFT)}Rx@~ro{BcD!G41-!SFt*&+2)6s3Tk||%`NQ3%
zB$P(62i6@o#O-bf=81b=nr{OEb6-b1)E;g92!m0az%*F5muhM`;u-fk?B!lg2%<&w
zz)g^!LK(?;gc(%<w%uoe!g$b(l^#Zzh1VDi+z|vEkW(vp!Z9?%smS|~K%DYvA$}v{
zLmAv-1~;TAFw__~+~FY*bZ}H=Zv(|%$3>>>g5U~gp3RJOvNAk14K{dz80kKh?gRfe
zzujhgyB+p)qXkO_#5Tqq9_>N)Z{l!Z?{IJh;u#>NvxNJMpqkDK{b&qLW~#DTW6CE4
z8gfDedJp3eMHzRXP{yP?*x|7N>uuuB>8*KUvo=mw(x($+Z-=s{%l*dL{Qb8R5NCKh
zQMWEe1PTPlG`iZ!TC~uH@Y}N(9w5JayMB6Qt(vXI;>V3~)vBX!xbf(tq|60el@oga
zkxXM0c1QTh=#>iYW|a}T57mUHKxi19VsfT4s5Cr-J9$H7ceo~RI;k=;8#1pjt@15o
z(XE}PL_~6BbC}*ZLjG$8rFW4m8hq6SxlKdV#~&6i9Vc*(cEPq-+yl=02g?_aL`nLV
zf9bfJ$X0yf;q9#|@WZTA$p3q~f1TN^LgDu1Hf*6(EHHr|%S@|6qf#eXC03nnn0%`T
zir?~p`!>Q)At&b?Dn2`C`)6f>ok9gXWhg8J8L@#xezpl8X8yw@cp1za;X4&Q7Le7X
z4F*yPsuJV*Y*Nif-sY#M*!oOV|9h$?d!1Z+^>pj?-LH?YFMF2uVU6Cb-hO@e0c%9b
zWeo&=J~X1>ZC^Gp=ZpOn0~iAu%>5SM4Q&{CPO-aq`Iu3~8WS7pSYyo7gas))lOs5|
zo`FVvI`wM=Fz#@yvnimopqO*(W%%W}O}#q8IZ8!bxmIEsewx7hkk*&YG5x00P<)=L
zFdfAZ;6p-NH0NVGWEp?3><$ZMaU2mw)wm?Z96Ug)Gu!E)nSTu7SO%j~wL397=UAT3
zh(<dSoK3$r7v0C9s1=0N3;zdU(sCsDl3S&qjbb@ADHy1D>zbgVD+p6HFSzGCTCWCH
zh-Is2<&cg}xgI3_rM0XtX7}oF@RO;lfB<M<)H8jhvPGYW)^C%zMFouLQkYcacMu7<
z!G1~PazPJGWCO+yMQtmHYBfTc(neU(4!F%{zZD34pp2XVe;IT+=s0qC6aQ9N+Wi}B
z>ONVTh5|1kPI)-+6wAVzFYQ`Eh%Sq)atVP3QZX06eAUfzw8UK^H;t|IG_ce`|Beh!
zbumm`1)HJy)7+4u+VTJdy3BdDQPHs><xJS)?B1j<DCkD{6wChobOM@CqAADfYQC()
z>dTtpHCKeOAslCLY|d3qMOgy(shmnt@{|HMB9{Obnoy;2V79DL%iTZ^qraBZ0ZKF^
z0_xORm5a|55eGz?i5dzwGw84^;Hn$g2oAlVgC_}%8ls5H8t3t8q)ZJ;kxE<+ezE_I
z;;sq8<~GpR(W|8gSLa`>Xw2*}Lc-&z?eM_#*keUZeQ@oMLCDm&66c=JWkvH;{gks0
zd06?BhnxA7qsMfQ7fAJi5>2ICC-aP&a-zU7ASStdQ!H{S?vx?as+QL^rutO1eXnZD
zG1Z(fg_SOa2TI@`=YX0dOvICPn|I-=Ys;wWtl-Ykstw#7s6})^A{-J>jbtECp!E9A
z=kql`B1HL8q3`w6Apm9WEs|fzp`h6`cNXV{$lYsRD~4RhSG!N3YwQ>N$+;mocD_;X
zf0cSA31LLcD3QDA>Xrn}NQ^bpV0Lpr5X*1+TG8a!hEIb38nr^vuveWGr)AJ*1;SOp
zAsw8>_Je5~yV@EwLnfq<hGMN`!Ir}<pYlCH&y?@U{j$bbi&W>GqwHZd;ag_(7BPD_
zO^$h$D1j$%WdvDTk}m!fI54Mp*-+eXlBb;){&HFp(N*#28{sQ-BgjMUij<Pwaf9F4
zMu-Z(t))~f+6_)`Aclxa<2!5d!@{=>K8U72XouMUn7^Wkz+&$n3aGvyKNQ-uvyVB<
z1b&bM<!9%Gg6`JA%I3k!w9Id%tk)co(YgH9#^A^b+MQIkpH_K7*3dd!rnN4=vJnEp
z9}&MgOJ3UjM##s>`kkY(#n&52sQ*;_w8p>&kylXSWlBhT@?1=EmUhtlVFSLNGsb^3
zf;2w;$oT1T-|5ntL4HwV@Zu);Dw4dRBCC=3CDJkp5g8*!6t;({iquE(yD<PUg4udW
zM|MSE2?Rh46RSMB_#1MkstJX4pFH^nE5fFqJ6K;ZT+3j|Z^{HamlM$#Xc!ix2NEFw
zc)&nIdJJrGu+!(oBJ5<SW_=?B;S02tjb1RThyA^1NXNp@ccW$yN#(vD_JAL#bHs1i
zM<V329sJ-^F+X@f5F8_=3Q;o3IFc2NDtJR!0G{0+L94eeFfJ3a)VhD>;&KK7!Hz6)
zw8{-&%Cc&V56qx(nB~;M++tiILy>ZfBug=~LmwDr=4oRhIpzLTBlbm$aeoL?fNlU%
z;DtHv$?B`8&uHqAHl2rX|H;bK$@)08lQvt;SpRR>7>IC6IL8ct;6IpR(H0)k!a6d;
zjK?})%=k$8yDJ<3op2qJ3Ni_*)qGLW&N4;AouGY7b$dYF#i=hN$bc%-xJJDQu4gHM
z1IxmAaZ@=mkWfs4CEg9&+l$fC9C}c#`ia*r!I4|4n<H%PEvdTG&kLN`33Ho90t>*B
zx8c5Gj(<3;cNLes<Xp0>rkWEhc<C}6Lb(yT@PZt6$VQjshLdT;pBL?JGiC*zXIdn*
zU_b?y$tE(8q-RzZ6aleO2pJ=qsKdYT!x9ah@h53?A=2<`ES<6<p%!l3g0_g?_>hnK
z%XTko1{PYx_I1Pp%-*s>x(s$`s5xeuPuvH9TiBc&kH*c29p>*HOx;jj1#enu9o{=c
zg8B9Ps1Hq;;<wzw6Gx7g9wLPqXSiecJ`S%vtl{`X)IbC!itl^O{xS~fHXoSsuF!1I
z48@(Ko7>{R1Q>}tiKeC8a3myxS6tD?l+s}NU}aCh8|+PIeFJ`x!?guOK+;H}H((GU
zo+z_g5=B*eq{vflzcy20C3AE*ryVn8TgpvVUPnc%Q%a<Ch3wn3sBPF2ohb-MLRdvh
zO5l~P94wRUaV%!dqN?8tOIZ%kUK2~s4H|pm<uoilDtsLXn8=}t{CL`8JeWd+0z8L(
z{VrUDE%skK92h(7--HibnytSFX+{Ivu(f~VRbl-owIK}kZ%&evhy6o`{l(_c5h9`^
zQ+!DP8eq$}c(T^j(y9#sqi`o+>0op4AotzDW}k#W&^3T%h0%`xaEiJTh_sMC5P~w)
z0d&S5IaGAXg0BMpUSy}Hi>V;h-PyM{Teut10u@zfOZSYk`-!th!)Nz~jI-3&XNg(v
zZM;o9Je?Rjok;264d{NTOmhjrR9AFR5CLS=s&OhvZv=OeMNL)6^BTJ{EtS5E%4f%Z
zuy^k<YqE@vQi4N1{CA3o^ByT0L+XXNbcH5F30Xn3=KX5}HY#sjLQE{cZ`V@YQ6l0x
zUUp?*E$8MzY8ty(!0uGIW{v>YgeaxTy4RM?KA4DTh@)k&H!8kblvA2ErsL+cBRmIC
z76Nv_I;wzuc!?Ad;*H6gEo}Kf;yx+FeX7GZ3507%TxdyDc<t459GfVv!mP4~cLf3Y
zK#R*UEH9`s6uR;aV$DNuGeHa^xS0~yU~NH7U*j-O0$ow0(gSp;&kzbkYX}Q1gR`oO
z`fvk|eIXPN)k2QFSZBB=WOtfD`bq%e6SNJRJ``7By%FzWmQ?RmX6G@tmi^O5gxub&
zVn&6z8C6PF*ilw3JF2u?dZ1lvwZw_`z!iQ<(m=xHW54oM1%*g#2+BbC-pC`)eZrP9
zFn}`3*uP!2Jj_v~ZMMX>WcLTLd8N~>!%s;bxFF4Qv){E;b-8ENU8>hT8R*lC9YMt?
zA+(Jg$cPk<u8I1kgi=m5Rv7_Ea4-sYHAJmbMpSS;BrI%>%A}g1?JEl0DS^=dsBda1
z4+Id7<f>>g0AzYJh^SN_Z;TNHYO;(SI~Y_3E0_qoN>D}9s3@<R!UJO`&|#{2aH5&&
zrZNE2V53ACyl!4StnjN9#K<DM-%~yW-MU8GhD!TKZGq&k=Gey4s9N1_W5v`qf@Sg~
znktn#9@}Q-qYR-OeKUEokvhrrzh0kyomhN5H}vN22s0nK?br;-AKIQryEk~y1^N6=
z`i9w2WdH_#KV3Y!aqal}*zwx6<J)`e@b3v|_RD|xPp$rFYnvIZQn*vMrQKz=BKDwx
zo4FbbxJj>0snc%tnYxHsn;r7Ec6IuqKmYkTnl^{?IhWe%_o_QpwRJ2K2MR^MZWp2W
zS3Uk}cwzl!(SX@yw}$L0r>q4uhA!J7yUh&wD}Y70#rPuL)fJA^#mr3Nrx7hg2rnz*
zP<6wjK=4%Ac*$v8GrHvge7yLMAxidn*w1bW@eh5f+{5~);vYe|IT|bN#(E*Lg(O1W
zS|h9&hNoarUX8bA5LWs;Vw0S+ufvSAV;*hb18>bC9HidAl@W~Q6g_~2YBO+pHiV<Y
z{EoVV!DdzuA8>?)csf&r5=AR-O(3qJoFy{A<S}-!>q(SqloBo>r7*Wj(!cn3P5Aqi
znwcbC{xu9o3}65w*AO#wPU~#B@9gf)#-O2_S%gv(;qRJov7IDH;yl?CZgvXY+`0u}
z<kt!s4@WLy4Vi(GnopRVNOb1bR?%%s^UOZ(Ru&~ohGfww;s$SZMx&T^w~9_z8lA2+
z0rVwBLLK;6i)?cvtnc@2NKPT%w_i6HQv%$JYrm=JxGaF)DZq8i&e}qDLvM!#;)X9`
zr7b@ZHrSqyNbbd`$Wj|bSA(>p$Qc+F>1hk(H;kYKpGB9$1LNbUpdMI7OPS~i2GEqB
zB2%3V>m||@^-$3>{qivy#sy}JIya}$b#il4d8dwVXO1W99*~=lWo>Ret_jwC5xyMb
zx#m&3J!JWj`!Py05|lvmN^BH-V%@bzY(FGUhAdyq?uPuGcoeq%;iJz!M03xtJbG+l
z|MQ1`_}sW`wnc28F0Kc?l9JM%;0#O=5Tzc)0qf{k2TQA*GLQpzdu8HIHRYfd5&i=0
zrD=e`%D5&CULwuAOv;Lw4B}-om0Xfik_uK)>C7G-X%0^k^NB>QxHh*W-t#JVXDQ*n
zJGa#PBJ-8j1PvlK;4mkc7z_Mr6x`4d)kGvdRS=m_3V1l9bXRd_s#q{(-4~quQBl{_
z3>5nR?@g^LEb+plq$yg1aq*HPE*gA3TUvi!><o989vU%bx&^}RwnZ;K5K;o38Uu;1
zXlb4O*Y&zXr?GHnIKX<sch{ubtBI+Kb8IpE?8s2URc3$!wE7!4F|{J5`dfB_t^W;X
zrcRuhN~suI59~iEDCQUK#7u>FZAh7msT(}s*8fV=mC+dv_1ayqtI-pW_;r~to!OO{
zvZY55N+cF1w1~}!VYWx&T~tFBHm<_NO7p7U1UWq;VqF3D0pKsPQ7S5C1<a^=%Pl^B
z*#((?w#lLk)I@Oqi9KQ;l#sC#C@sgS%&aS7h5QlK0uNZ~=fH{??9`NR0UIxV94G-G
z@GRYD=DKP7EmSsB&zYSo+Z9f;g>Yc{Iv@(HJ6yohum0_EfL;EE1O4Twpxz{Omc848
znsWniTllv~%S4EobBHqCObrfbJdb|GCbgPJh&YL>9>`rB?}HXrs+7wS73hQ@>0D5Y
z?y{AfpLf|WZ5)}f<D5fd=Pt(Q;PH$C!6XT>q1?HIAipE_PE4>{>~m`A%5CXtRd#Ds
zp+mKF<APjop{!~Tl#=L0b<=wyD5O4PI%ocXp|df;7D#*3DHj}=+eLRF;V91cx5L`&
zpxWlo%IzmJ#wk(AfMRbrz8_x!bj=2L$bk2~iy#I-5AbCn=pcHPy;wmgKcTwLcvNvC
z@oHJ~V3<uIIJ5GX25<-6cLse<ao)a&WwN=47b8X;2R7h(*Re8@zwI%4+wapifs_+_
zQ8Ur~l0(1IxHw%grRtt7xVzZJ;ovM{`8&+xRgZKyFvLdyaG(w(4+p1_Eo2ZlPzSyd
zLUy2ZhpebblB&{-3=ARJ2sRlUXT2X^Uwpkacigwknj5NfE#&ankCDUv)syu4Nt*p;
zN1cS;`-gDrGvs0&Q|c|7s;H##55joM2<nsAU7qu%s&9cpwkqBQ=Gi;m{F*4b+(Uey
zTz|?5aFt#Pu{=>#nZS)X0sk+EJ_hhuQ|<%PI<X6n9!M5fN>NT!mV3J53)s{-eyVE8
zsAdKN+{T{@+0*PD<({3*_VGLQb~1fhm^@ut$4mswR(PAZb2hVPyzPH{y08YcuUPu|
z)8?*0jIpP;=8dzN>`%EYw!uJ$S{M4*I2%g+v^jKolkGP$`SvzQ`%yiqUQPpz0HBRs
z;0Qm&Jr;Rn#3jd<b0sr47uq5Q1O!)#5LV$?4SOP=pg@t8N}WEbA=poQ(C)c%W?7!<
zJu8zMW|TuDMN<$Pl<cs3La)%t5646E4ypusny6?k;H9jVDVkR;q}9xnsQfTdk*e#%
zlvlq>!&SdZf;_h9(hL+IB_Og+w-^n-+Ii)Z4d)?0+46f0JG;KuRX)@!NW_z^$l53?
zvaRyT9@b~Sr5gwehRY%g1Cl6QH&A9?@Y}eyig^@kP6h>X`lu-#MU1#*ouXSq)Z*r#
z92b5ph_<u1IOEFjnBjdxE(c_B7=kpHD1+v)-xMB)SN)=BB2O4uJXlM6sP`@B;7tg|
z-;CI?T$XW_pSi3#)=dwg+YQMd%cANRB^K4&EmK4I?M|;^=y6T#op8hydHM(bhqL%1
z4=?Qr=^rx`t=z$u86@bPab=&(h@Br%Xp{BVk1@|NuJ#DQ&%9az!K;qfFjb|9vC<Jb
zN)bb*3u+m&cIwIRIoBLk7eoF@7m(X^#z?i188%@Gr`l9~{Cwd1VP9_uizbZD0MO0?
z!>?hZMO51C`Nx0c(vVl!jz*laS*+(n>MajP%wB%C!$8jqjKU#P*LxFzTgRNU9WOI3
zY*5vZQ+-UlHnQTPXuyT5`IbC(2PT~bom)ZOv!o!gi`i%w<Rq!i6*E}W@NofP|EENr
ziYp;Wh3BCdgyv_F{}f56Al-!*#!l=H8%U-Ks|(1Y`i!KB`spaUFN^!*`;+_EF|hzo
z12SGj-^#_aC0$DuqS>F*K<bA3-+m<=;gM}K7Biv{=)PjI0mGgKK8!Ft@=_|+ZAChx
zolfY2ImN(OEdAPu8FP&s){Mdw-K7*el4fcSO$SsV<0{;i1v7g{EsQBJ>KsuuEr-t+
z5RZSXrrS`zjl?sR>>kR-**w@N$k;ueA?5hY)=LbM)W>@==1&;Fn41zTCCbhD>1<G1
z(1_?Eg9$(K0Anh4;A9G^N6w2RBXQTnTDrA)kyKPqu9Q?xs}r4RW*yO^&5np9A<8Lo
z&-7)vM*R5!*ii(qgHnblH3@zfgnB5XFI5%$+%v?!IYFGfAo@VYX91hIGMr|VBXMM9
znDj!is(wJpb4t<r3hbpunSS=*5tU+H<5^H;x}>m}mgW4b{DfWf0NR?R?z!z5q~&cR
znL{43j@TSFDoXcTcrFr8bfTFn?@WbdU{S59aDN)aN_n=E+C=77ZU3*n+5OupHvnj%
zBI)`lQceTUL{94X7TDB@X@NvEkcGu<E>k9^PRQeesdF2r-)9aJ1?>C`cFw`Mu>WQG
zg%x3f#e^(ONaTwOtu-jJK8EbDJ-Bml$~TbX`tM*U|9#C<*dK%GT^-k~jqZG{<a6Cy
zBy8=?-I3SD)X_JS6g8mOg>n?O1rPIk<7jyN^^?@=xuMt3zJ7ge>tyfS<LisZeamdk
z$+OI>r(4aWeI2i-PM$8jS-t(L@bqM<pW+LziX(6OZoKKgPUm#A@Z@-Um(GkGtlHVh
zPxfDSCtp2XBna^6#&_)M*e<WPa>w^~-}F6ry*>BdXu<~UHoMIi)F#r#cI^Av<}n4T
zYEzvIEO!SHf6#31tf^&_M~24U8lf5y&%H9(>Nrpv|L32zlSOzr`m<ljpRL||m3ec1
zvrCtQ+-~!H$CNf2_er*5ciHz<d)qIYYxeK;o7rtauX%LtX7T3Qb(ygajj^w`4F<ky
z;sRN#|IkFMScli#O0`+Y(oSncrIT2z_fc8V6tt<M!B%#7{hUe%N%Y%hyFPP^-PUp0
z*3KlQnKK;p%uwTM8uodA>s<}w(U9%?SB=r95Y0qmmjkv}#crn^JsaItU2JZPSyAny
zeF+V98^O(N4f(Z`X`_*@8u`w8NwHn=SZmOVb%g!V%Dh;%(fCYktdkNO`Pr|2-2U;=
zzYQ2aF8|xmkBdKMe%ydxjfr`%@*A=x`ic5xMbv;VS60Y3)-P|=n<oaJ^K{~4-tuRq
zbS&)0`edvK@|z7c%~7hew9b5Z*0QdawjWbD{p_=D;?DIcdN|2#yV+*ZGqyr?Q*YV0
zdNKcE{YBx$-iycV|2xKu{EMAG559QxV)M`AFE(Fn{JAdcIAkz<R(i<Ji|E2XZhZX~
zK#O+N3i*Q;T##Kq%hzUmUoCCLVx_I16|!PhAXrM(&DVx(1vsY;dM1_;HKFvrF`W?c
z(XL>@_5v1j^E&M%Ecf1PB(lKn{SOa+nEm0-4~ZXEj31VNnE&BUW8$Gv48&7#eZdf6
z_+|FDZd-Q7zqTL{6fms7l~OWNN=Cz-jk1sF-KJ-SV>MRyS=oMU681x?Cz~7U?u%*i
zp{NxL!2pbyrR-%ZQp(zO2P^>1cmx{?x$Q;=0i^e6c=71k-NV5o!l;19>LoO<Ab7Yy
z=p)g`E+cZ7_N?V__U-WL{YB$6`|xbyUcH?IAUkGtn=lgT^{qRv2F<ENVnr0alKy;s
z5&Mdi=@mr17NiWLA=qG3N;R-I3MERDcAeNVp4bvr0oN6U3AN$)@Hml$P-3q;Y(>1x
zW0ZC+Q>{l2*0u;&p=#P?#ANE;Hj!Qp*u)Az4e(<DRRR&NXp~HiWB97k(*^XSM4VJV
znc6*Xe#+j(us@C^p|Jds$u@Y%#&-9?Tx{&2VKVmo%RCZ_K86T;j{OgU>viiMqMdk+
zrA>B`-SG9QD<lz(s0X34R1_y`aFED<379cE#BV1^O^2g(?;~sRPi9P~0qOt-O`N2n
z9tI+%k}G<5P*=J@g`^u8gngtc@-@yKzb=TOc%+eNMqT;hjddVm?d%3W;4*QK{>LV}
z%>{UpSO|&1vw{Cvx9lO-!Oo?yd{B6NP`GnY$R8B$9_)-C6xI(4vj>F%@p}mO$sH7u
z4RhF?$6b(PYJboR<y7$&o6?Ur>k?iGnyKAFm3^ybt3#4{tiz&j3Q$Ygkm(sR$(}Z1
z)>pA6D5StZrHo<XFG^$XH(yAJs<3&kU@ww`i9~6e)*3=2Vp>X0s0rbcOp0d`&vP~<
zq({1loNy19vDEM~@N9CJLq9DOnO)iuWeL%)%ed%N?*E%EsfvH#O2oFl3dd|;XX%-*
z!?<GhUhRCIF``DejTwNwkdgf?Xmpj<+0+&^{w`#8J)iw|Y`3m(=y{UOtY(YR8ID+2
z#KQMerFFjL@0-lP#^h`6b71zg>pf#<@!$2{G;k?CH@}QCQ=g>TlusOYuiz)VzhQAK
zA0DgOjAl$_4w&3e5S?4@3(;np8o@;16sG19-_$lw+oj}U$P@<Zr`*BHw37@BKY%Q%
z+!yhX1+1OxJIq=1A*Y9^R<>Q*)sQNbAuA6Od??ST&#1IdD^hrCChj(yItwpAK)Ngy
z&tLlyx$;{voAv`6%qN@kRN4l_gJl1`G?q0}QjhLFdoy?U_1w^5KPl6#Ls}_heSX7z
z7|IpxUpO4-#~RHGN((*64fC`}o)fvvlQSI-rwmd`<?bs4POhBGG^%VM@e?Ing4~9h
z01BCyML{?VnV2<QtP4reJe^4WlwSwwytl(skZ^geV2~Gv36`X_QI<)mO%FAyP}53U
zrU#f5xs5*@R;2~Z4|LKa)dWJLp8xgN%|rD5l5cQ%d&nuwBIy!O@G~mizJyF@tdpnT
zMA9GT`w<ONO61JVbKQHlZ2f(a)90x9x%@s?KHa&q<Ah^K32c;uwuP0rIRn$rC?~Bz
z9^(?Fqmj%7`I3)li`2z<7a3{(F!Mg`=vB%;17Uf8@70jW+{$o|)gEROS|_tA&XG5U
z31hZ&GSB}JZjkczOvqt?lr>NeJ;4kIQWC@jub11X1V1X$x^BGO-k(4|qTzGoWbry<
z?jsszxB8W_0@|*F^dNIW)}km>E;1ygo>lULxZXvj!#O1~hRT&QE-*PrsII33`_DWk
zz@(5LC_|nqTfUEK>HSrRg))>ngUc&C=>`*b$~DyAU1p8cLqsPu&vjBTw?z)3{XtSu
z-^Sd(U81|9RFz#V9xIm%3*Y<M;p6Gi<N397<#><)2Kl;2^ApU7V4WVV&p@Y``|*D^
zC!<!vK&<l@Kl>H@ZMB2Jus<HKTEDVAf^oI`{3Z7J%M0!E7nm;)ZH1o~aDJjMk7)!o
z`uZ})d-~EN5LxS-LgAp*8nvQPwuyBaj6yjUNIbNQU*|thr&F(It7d9MTYVi?kG{@4
zHtk@qN?bpsgthGK)D@@4W&yjc%MM+&b>&^Kaq#E=v8i?uv&R?qYg1!E)2Mkh9?{LF
zz?LzSO$ALiWDnI2S=w}7aEDsiSc^sMwq~08J+@NPFz}SGc0plyTR054P1;=m)^<K1
z9&8FL)@pWj1vr~^6+oVLGCp+e<OyS!90R~bK`iYH#KgWLxCr(H7yBF{;M%~uLSvvF
zVpxfNRUT`rA!C*b%fFLD_z<e3TZ<e9CcC?xrmrqWjM6#cp?w(Yu|mKX&;VZt@^yT2
zJ}U1es0aIK1YxU>+^{_~Qb253j*jK!6hTTTdaW5zyq+bwR!=DPfG8}ZQg3hsNi(Hi
z;r`}j(~WRw<>KhB{K1OVz55Wb;f){|V|%(Iq;s0W8s0rD&z<4|rUTy{4$dHx_aiJA
z-aZ^$X1g^8lGVV<JO(qE3kYk|LSQH39YwxZfV$D4ZR73jq0{6%l#YH|ci`3uQ=~Ca
z;0i)C7VK<{_)dvun-gV;?9p%GW9RjWp6IY1D5Z625~oB*ombN>VD;QgB|oxP$X4;p
zg|mGV`G#{?Cq`Vv1yrj{6V-7eK=(tM60TTsq%k+z10qYWJM)n9-d}g=2m>2wk!$#&
zz|9@x*@cYzX9%y=hflkm8H&dSJdtEQ5bmcaxj`+QvZ9=#sLSyTbP~htf^a37Q#c~s
zL0QL<R|Im><W@FDKPAJga%)=M0*Ri)ZeBh4=A{4j>t|mdJ-N+%E|95za>D}(`Fbk{
zf!(d#$#&*=beuQcy-^dC{5WXZffnFMiM1f(708N&q0a6yF-nKpjVO<^XoHfYtwK<a
z{mYSbjrdMFv{VF*CGqd@2LxEr=XC?u<p|8WFpRPCtVRU?(F%ccx601y7SbevA+oB7
z+S54}B<<!DVtr|zpZD$n#lO+>_eJH@#rc~e;qGv+VYR{#jMEs58zzmwPB8#W>ze4_
z$E}~?kp2zp4oQ*zr1sa_`i1^zcC#Tt6>216g<CE0-dr`hOY82}#?)bp!@NM|6@$fY
zTSn<&!1Bc+%$E2v&W;guw%DO)%nY%ArH2;M4+R8L$j*x0x6;EfoI>d#oP*94N(*g+
zNP0kRW{wQB78hYq9<vRTBo9LpG|XNb7pC2+Fzu#<$_%F*x*%Y1Oeo;N83Gi*R=^nw
z`ky?p2uPI}!507~0dFYe5-%7B1Pr*fi+OA&jS9n}f!r#_L`;j;zXApn-SqMoggt(E
zZ=fcc9ZeJedvo{c(fUnDR0Pv=71U$~*}n(e28G32hl8Vs11qm~=T6qM7YhN7B7!%D
zz#VYvgC<Oo1YwY6h5Z<5Dd%U&BRH5u%rA4Wd5!p4Hqf@+KrI*~gyFO=phH74G9zdZ
z=gT0$3qk(Q?ma$zc>C<$Zi{jDXz1<aaEtMF_V(Fe|J%9AvwL^W4f>4<WtNO!Nde0k
z#98JL`EyKaSeM7l$)unonIO`W2|x!OhT2C8`d1LtCt#6ZS13t{;iQtY<gWt@R=ncF
z{1wj|ihj!;>SeNb{T8A8<HGP?MFfzX!h%XQa&b`LC=h3WrvPWj)lUc#g!5E5Io8ot
zNaEjHf^h@(qvo4Lx^Xc$;20`vGG78tDm{nDADoUtT#-kQBZ+@+5d%h1u=ZxrSt-0r
z3{cQe7+qmsZ|o}M`vl`4j1mh+N}D}~ZS=Ul{8x0;?8+fQi|pDqvyIKlW>*-dWBf+*
z`M9(KAL;W$g8NfgEc^sZn2TKOWC!^g_D9*ui55~iN2Lh>2Z7)L%9zK6D<x%3a$GdD
zi0lr}_J?>q^E2dk*wMrdKZb8nD}9S%tw_7zGOVKf`Dnw18xjme*5YsR#~jfIw2;tA
z)`0~xV=XnA5Aqy9B=&;L1d$=+K<dOHSjZ|us0)&$;6jnbG4$?61V+ewJh%`jxi6Ai
z*iwZ8faFb*V$}p9iB$8?>6f$EXPOfcxCsU7fQ&llUl(M=VYffc#?PR^X+~Vb6pG<t
zn2tem?EZC>B!P1Lr))P>=){pFDVD*OykfrUkY!tRhO8`S&ojg1&A<lYll>?C*N##T
zK>$ZUxWA!fdF0J^UxP8SvC%ww`DQS$_DcokM>npWEKQ%Juf6G;J9+r%JwY-hkYIJ%
z?YfAG1=XM(S9M!;hXZlA(q`MWT0OeOyiVH=E6~++<Dl7P>*6G_4(1$Mkv2P`UGVr7
zOsW?Ivb06QJyAe;YTzrs7TeDQtw=I)=CzL>EZwC&&WEK)nrZ5j(w7_b=>b2{?-rFo
zBH@kzFh5)0Psy-o-9iu-XyV}}Lk!?|X1CRDa{X|xXbEe?-e8cC_0*&RRw4<2iw7&r
z>%Qkf6r>hJB1!#<$Sga5klJk^AP){NzZ|*;Ua%3Jkz5-V0b10D=z4B73x(u`MCcTx
zo#yIB@ZBH?aBSY&J)6s(-LG3nrU!xuHp~2^s<!F1)UDpryoOJ+?xSpge@Lc6=Gu&7
zs$<>W8UP+^LdNkL$~^9QD_r|L(rdK`z906Rkxo;=IvSL(p&y|ArlNU&r#x`~2+7(8
zUv9%nuK-6|pp^3aBg_lY!8i<&h{sd#&WB<MDy&D7m?;N)c#!%~?Z~V`a7RYnCJHk?
zh77f7_l@#YOeG<W<NS@2<*}px&D^y#w~-|0ztXXny#O2b#rDY&UeDOE_C|OjW;Yb!
z%SMxE5luWAg9gZ@P6YUV%uu5dM2Vt?;s~NhfCL~)pg@Tt4jkRdo5v@PDTtaO75)pZ
z%&h7M&_s8&Cc<+N1$1X+S5-GUv$DR-?_S^iM0-Eu{fqkf_|p50*WE3gzS+O%S?#*<
zS7lTiP`E{v+^rBA_|C${_LXKcOw``e&SU6sf#_AnPCBf9O&fsp8i=UdR*+v(GI}mL
z%2W!gC>bcXe|m5;*w2rvlnaMDtR|?D9}L)0Yu0(ZdE487N=KoqXetjWV9dWlaLF#%
zPSK_9K2}yH5PZ#kszb~K7**%*!)Z%H$8f6{z@@_2RWz|=SX-1?akbTzjIoN2U*6w$
zTVc6SYSkH%#K`4BR|#o~TW<F=&61%^>*?$JG{}Y@Ev-COxb1DNLqrg(qBd<qd@fN$
zuqkZ$wwst)HYDga=+~}Y3-K^>`|k0+mki~7?KJK3@2H&PM}-D^mr8EBY`AnJs(Si#
zmaH|z(s^0S?oyW1WZqH2!_Lcwk31(rK5`=_gBkdr)|-}%4-=-bRPF7aSMOQI|5p-_
zxh@_RIV|d+7RzaBls&MLEptXrOGQ)0jZ88><c0K=2|Em{BWg60RCCvjY^pov@Nev-
z@|ktAf4~8SZHOZ--w`T6*~~S+;6itM3srzxHUeou*XrXH@c@FEdvi~V9qk}Efob&(
zAHbEPFkikChMB`mPz6Cu5{0&YkppO<K?{Fz@OEe-ATt-ae=uTzTkZ=2T-kRP$hBET
z)6}HwIf1XkU3T0%n-i7y`>KLC0nUp?<Mr!3_&A^GOkNEp4;~GU7NL?`B1BL+D{f)X
z;GutrfE*Oq7RGc_P8D)UsiA^h^iX&>u+8jWdFSRu<F@kQ_Tt62)elPt{(*v3+{JCO
zzZN8Nw`*fpxGH@?f#7V=&A0E^l-wONEEOF3xQouj8ghlw-e%wS<Uy>W*T8%OHXsgh
zoWX!VRg~`UuOvy2O|XZ6djgK1m0yc2|B?7D<UuAA<Y!H5_*G%=*{F%old|JDFwp`j
zHbmL(00>3OZfml?nA*ncQ}gX8hi}mLxu8`&7GN%(Ok%zwtn2-zfE#pt96I~YI!k>+
zKip<tCGE7=D8osbtRg7?tfh1R@wxIxJ)Qrn@;S14BM$Ks%}nHsjAf5x**CrP+Ni^B
z?MhFz6K+<z8MUSHhTi@7rl$wdo~j<s9&Ci}0ous_-%fN>cY|#Owg};e9IX$=y`KZo
zLCGKn8fGurZ(w9}UWix|`9*9~uBD~HfkZvb4Lc<y!yt4Xf^>~;3`b5cEh-)YdUHlr
zHzYxW-7MpDaV#35nRpfQA4~<DUI?aWIb|T9)zo}<uOve&t^onOkGmL6BK@-ggX`#s
z5D#Gq>x3|Oq~hbJxXX%g$HpcilnmH-KK+Ck6hDCezK&NR5tpMx<VGb2OA@JDlSTXq
z20OZute+XnEkc$)?5yWYQ!*grH7C}~@T{=e`-TMKI&!*PgzYk6BlklCZdVaKtXTr=
zVS36V_PLjv4G$AC5L-+K^x6u7Dz1$mP$iPa^M}gK;3gTHat-Hla`j?&5~$fOzHC4l
zkizzdiLY&U&{+(GJVik=FB8df&!8hVg??niMhLEp;va&y47^D4F)Pj-oU0tp8Y2Ge
z3XE4F0lR$I9_};An3?xawb|<p^wj#`4sbU+m}GR#J--0WBj<OA>UjugpY__f+FS0;
z7;@MD;NEsO372%-&fQ~2c+Lg=TSq)#ixX6VGhVsUMYqd=xcvDR01QFkG3@S1!vP8W
zrz*qW-@7rEAZJ~PC2cQY3~bb>CdjBV`D8}V<vFuxKCG=0xWGIaXxlm)wtaR@p{9K1
zHqP=c2tftAXrajO7Mp5b&-rmH#I?kF1^Jn%K!{5>+Wjoy|I9>I9~m&RG1FSzNb@hb
zAGg}dX?+HPe1y>}S=c^r#9sxrI|1%j!***FGYzn2@~ST4T6bAgN31V0WjbKF<sK7Y
z^JqtvXMlNt|JE@IOLRdc!QgX9D>a`58$0e%bi}pZt92FKlm`U5X?Xk}J%sqlKYRkT
z_wu_=>-F1%!rRu$`P?j5{ciQk&is1k-e%|4;=A?5pB{Wi%R``j!Tu0wO_%_&*ARd{
zrq@ciF1GaiSHl>CaoE58-QWN2Z;-TMIOdVF-yeg<HT>O2Yxsxo8tB~|HAnT_U_3gE
zafOeGPA6g-6dmX(Ht_GCd@5CE0OWs>dIGq3CT$;|*Y8SxV@mzP%S<F1Hk;o^eNPs@
zk36jSa2h@`SD%u!DRE;qbe4X1!o5%DVYvgH51U$#0}q)<4{a|?4`CIsv3+?ke)-_}
z<y7C&`xF{3;che~^UVAr3G+CL$GiQL=KVs%sH7)3-Y+nWtvs^c_d=w+``l|+s(Vf!
zq2jDQjjl{`_i2|CWBb^QbX0O>=J{d3wDayf0H5A-;|<rAOfrWifs)JriMgLx;F=hI
zl4A<9oH3>QbuQs?z<cy_sIxop{rxwG>F@7DwCl~`(awmP#=672UdqvqG6L^eYfq0|
zQ#OxSPXoYJk84W&sGPs94=E$CKuO269P>B2-p-N%!#}5SYS^=xWUCCd>(#BKj@C^D
zSi|%iY_PJa#Gj^3Eya8jCH7mj>m|?bL_Vvgpq4D@H;=Y78T|xLZlM*R5FO!^p<Xam
z4lY}X!WDtDDzm?ezx_a63lvRc-;1XenG6!LFx#Be93Zm%v*I={1rew#D-s4tGLR?k
zudb+FmI}y<*+TR>R4rq7L4#W%TfX<KID1xDB1T23)T{I}aA(C=e2X+J+N*`3vl{0j
zu?zF^c-qy)cdxXa?C33wyKPX2uZ3ThMMAF3jpv)MmyZsV%i{Ls%GQU{-HZ9#inr}m
zE-IUs4;G<9@51cGLRmU1w}la}C-}F4wRday8Ip7<k=6>cF-!d%!(c^rVh-bDmi=yf
zW!Ul*TqTSHAf0I2bo^AB)=z~#Rq(_v1-&WM{Rsa)wX5$2@+k1{iincKrB<Z#ZCe;M
zd&2W<w;xev?IFhEm=`i55O7d+mmS(a-I+MuE-T*7N^e}w3hk9J9)}S45b(Ka3Z0ct
zw-zb-W@X(X@jUuHOr~hX)rwS*mml?alCXd;hT^DzZ%G8@wu(SkFB_5jPtDA!qsrio
z@9#l9aiV-rkJ&bKSXbWM$0EJDnNTGyD0jsEoPxpy!+9)Su5m-h{W610ovPcW&X6%h
z=(ar&rFe-Vy?rtPK@I?>W8qy)TXeZ~$NQ2ap-p~DGNqK%CVF_KU?kx#(rgh43*a?6
zD(;r?+C<%LGlENl3+FJ+^-GK|QQqn@#hd25xv`(lDE^Kq#mj0$)y@9O#;WlCo!nvf
z@1)3XP8)MfTjI`H?yTfJ6DbF+@$*-c=dYGJtrl5JhzE6hykR0xpt(88)H-xJUE*I&
zpeZqEB#o>V(*xE*bvBJ@YBo1&WD`*~4oyp&DszD4`F@$6v_^Uo*;v3jr&qPV9uPk1
zY_GFdP`DXjbC=I%R8P83*}xXxz4eG~^$llT|BbIm>No%R<NWrI3%ftgf7e%#S<@dT
zewhES`VaE~LTG(MW17E6I)I1>u=SPl@6Mp=FA_k@n6F68f81L7FaP>`e<}a&Xn9|e
z>;B^gn#ustK)(QB(qF{CyK(=%#Qpmc4{#y>I|&4mej+(vkzxNEkO&~+|5M??#C{^c
z=;$Mo0+YLPeEYa>@~cM(GC*{(pGexj_i58##_+!q`QjyO*BkgQX8pp8!GekbqKG~s
zrvLMA`gh0t!dE2c|IFq5-(<kV=O^NSN5IYV6XAyGzi0a!rV;=7v!j~90-1fK*d6^t
zv|GsO)%_`s!DZNRG*hZ_jrk}h^9nRGqNqw-ABrc(u-=e9l+QvPa%Cv1X5wrKl!QoV
zmXeyU!=BJ6I~r%-2a|br0R=z`X)PHMn6C)OK`7wMm<W&+Xs}T|Hl*cDs@}<*moX({
zj54kHBz8>=7ihJ&(X7p`Lj;D_*as>nc_?tPb+I+p68KOix*4G@xHXcW!`h$h>`D~3
z--_6{h$Sl@p^Ammjb)3^Zd)jRdb+VM>RDLV94EA{lS6L;J30Y`d2r+*J6-=RQ5sJ5
zkRa~hl}mJx6x}C=L%YKjQ6p0n*<8nuG{h2$Rj8sVF@y86q)enn8fU2HrmNJchYp&X
z8JJtlp^3N}Y{T$@{hRfDLBV~-k{EIuzaC=J%XrZJ&M@fVE1!cXq%*^fKM-X_X<Rfw
zn!y%-4rsV-*P|dLi34$*R5@#vw&9uJB|^F%k!3-7Xl=_@#kj2_Y<UV(Yo}H#Ud2bM
z)KSH%YwCVzHmrxH;CyKINxf@m700#xADx!xqcerA?Wbo6pMqlWr^jwEC3;cn`{)GS
z;Fbn`d%hZ#VNDqsGY$6AORHv1MW*od(H6WLuLFJLmXd#SsE?@ZunFI?#*KJZO&{$<
zlmvTSvLnWTsmAlens`ZOvg)8Q0<4soq%j!J)7$cBD}^QW^@Ncgi#XILj8ulL2-CSS
z_MT0pjkuwV9KB{z-CIa}qh503$a~amk*H{}r4$D!R)QM^UCRzYxjO8eImpb(@3nM3
zcSFk>1MD51JSxMnl%7*QqoIBx?7`<~56B!iLG0IrIWUb;otlp5T2f0ZNsaX(pB^&V
zq-PWCz{hQ2{n0R$PhoI6ZL;Q=g;R$OF?6OJ`jBRwQ-z(o0djdWuV!_F4f$xD-4K--
zt|Sm|GZDpQE5FEVWDv)IJz3mq_Q{6JN6;R!>$5v)veAK(E<`;$mC6ssyLZ_#1{apQ
zzkI3Waep-vsts!nlTQ0<RKtMmK8z@?=4sQXd$2kUHpj=B*3>+Xmf-s639!)~J%`@o
z<Gq>E=;+Z_l0EB8rHR*p%itu4ma3jMb(~IiFm0p;vKsR~*c{|_(^PR5y7vhscLZr~
zPayhXhwlLa`C;Rak^)2m0A&JHC@TPJ3(sSX;1!ar*!zK;c(i5_k2Y{i5H}T}F;SBi
zQEB_MQl`yC*n?!B_QXyj>`RtWzKM*bplI1q0-}nIOV~&D^_eB)+`rrts=wrWnLP!X
zsx%&U;y!JEmgv7#`j3XY$qOrcr$j_(<FhT!tWh4s3Le_WLwtKQKyn3@X<p^Y71f0Z
zPDAA}b^b2S?4g(^KCCu=rAZhMuw5%QYC)n&{ei(tZ`pS5!CKo%hq(1rqs^nHKy{bI
zu2Kct!824|Vuuy1s6gzHE7YJZwxQ~%tYz)y(m^IPO`BY&<vYY5JBM3eB*k}$>GDpp
z$}0A*L_1?`QDGX&GTB|JU?RyTuVTAST^0Rb!{idU<De1s;%>8Zr@D;uQF7F__Eutz
zbk!;a#pO#hoqLXvwDId94tiW>`&$tWYp8@dKIMHJVNr5p{0~X*1(R~L>9iDsgSyII
zplB4xaI2nXLQg{dYfUVitaoQJY2eG#T0UV68201QOr)6AVKx!8>5jCTbUwuluc`HJ
z+NX?U&frEvrYx<%p7oCRtWf`{dbjg(<E&75zq!+?t(+AWJ4^NV6R&}kx7fhM=F(a5
z(OIzyHM%D@a8G{^|2j*gm7p9q9NfqK{o+}19xHf%jn&htXT?b@<-LDatYez=1b_sX
zK=B2V{!X7wJOtzb(^-55YN2h9JXDKeJ!#>!2un~oD{Q?hH9BS1^ChSd4p*#Wk?<-z
z>jO!+_3q}*`-8c&!tz;x&B5eZ;V=v~fdB^1RG=2m{lU!crpN^<i3A}!UBQasE&3X<
zG}Jl50V|WEILG-#yt9xZRN%(KLy+>vcpi9+S1{RVU(gp%Tl%QzIn^I8_e*kWqJ~vG
zwC2$%D^IFu6CLB<BAwjD4ubwYQH7^0YN!|3=c^GKU9`(=5c3?c0nHl(@PIDN*#m(=
z*|3nPfORy)OtcUvvX5;yP;SGTC1)}AvbcvO3tB=6V$`T%0Y)nJ{pBE)CwTvwj$E$|
z)=~~l;`xgONXgI79{${#Q!XdVmk&#q51uPOH^wjKA45_XQY78|aQig}p<I4@Q=tli
zAHKQ^(3AP{#rmr9VR7?f@@B-9Bo<LF7gpih)T(k>ngrq3ca@9Ak~@{`a&_|ZVO_cS
z=BaYIynV5@&8}X$c!*c`#=1ZcFTN=QH}&PWvzPPDkdPM$1!E3aHy(wu$0*UPX61lA
z;^-DAfM{osDN#Cmh&ARN#q6Ce+DJrQj3}Z!y_>-rfixBrri>De(`FHsbOU0%bO%Dx
zK*O3NDXu^Xb5q#yCSrd8>9Z%WJ7o(36biWGQGK7_LRRXX1t^-vOulZQ5b3XB{r?hv
z_5>F-`P_?(3Lq0m)SW|E7Ag<1a8}z^<DeOL!kNT0G$%-HQJh%CMtAY1vJW!OLh7oO
zR)qYNvT*FceToWj5%PnB+l9U(_w~TjS#}iOs&b?fx-m|?Q%U%kWhY>XBsR!f+!v52
z+pT0S(G&g^&s8vi*vId)9IQu9e{99!v+mOc+4**0=Z`Q~5m(D8Q6;q0*BK0i=CTqj
z55<Ko%Jy*rRPrg3#CSQOT9iw<ig}UjCTy=GBCCbxN|<B`6S!SRv3>l@4zrO0uxkMY
z+Naw~?E4;^;gs!d{CWta3p;G4fzEvw72jds3Hr6d&*PUV7u`FYs35HD1s;4z5^DrL
z3(yrB8VPRN+!0rdt!!T$!E4mHw)Gt$arbyD&``cY8CHpdhO1U(SDT$8_;dwD()G+6
zrOtwo&@79qBUl__;s6A<ibQ!eg2558j_B<x#G@B+q2uOGH&>!?8tTn9fE5F=bnHz1
zbZg;slPzmDmql&bDhJ(d?m9tx3y9Mrri_N-u5nwXYZ9QZO`kR_&|e$7vEu4b1=Dah
zIZ1~{Qf`DWeQo@^Mu@^`B==x4n*maK@xrV>qWZ3z0c0Ns)S!~Q>`Q^2u_!+pPKe?j
z>Lt|T9-anJVlb2<$=7Hm_rqih+b{lzGD@*x5ivDJ)ns~%FA8t4PqbpxhE0Sp<`Tvz
zaEd55)KOi_s>9z;4USoHV$3OyP(+yTs2QVL1oqJ09B8>QE9)4zNZ10=0fmGv8A=SU
zF&Jm-!8Zp<ebjh!pDk1e<MO~rh@2ihp~&f<Aaa_FBSMm!J4#w%OW2g^h$vF@^l04~
zKJ{1@AFYpQNjrS1Gc5{$m|XjbM>~Up;gm0l6^It6bvH=US#=_n*K>y1eI6!8L{lZv
zo8Bwq{~5oHr1vLt0OABP6$AmEOdhvRW{#UDh2xh9<79t$6=L9>INm>D3!>v)cJkH9
zS4^<M{?3u$E>K%EkDpRCW8k%DGNDCJbZA6D#35ixXR!520r)YFUm-t63%@m4_n>P{
z<@g1#x-d!BB_=R|8XK$^079kfHwClH9+pa8fIRP$Y3u<Ej`e=&`1vmdRJDMmZIMmg
z5*gHxC4Ea|RG08?OMppdkin%zaMQ0Gm{b6~Dk5FG=0LInxeJ{rU}iSuf441ymMJ_p
zYlCHZ?lw;ZUqSwsIv%atr|U(F560?kf$D2Egf=9L5tk7VS{#;w^|aSeuqKeSHF158
zy^WAm+1Xk9LKQ6ES+YevGS(x+Q}c*JbZc!KkBb!No*3@4tJPSoC_{b|KR2jPR@;7=
z^t+^?B!I?kMa(wMD`(LO0ygF<>kC5SDJAub8MSUb+%;!!o{7BuElD^sA&Z@f@-2Z)
zrjlm>Fm8*ZkA)B<!bPyYIeY{Ba`dz@jcN1~m@GezG&~I(#SB7#QeNhjQi*p5PM3)4
zM?vUw(W;-S^i)=_A~jFT;)mT}&8d$__hS(T^{WaHQ3MH4i<AB!7@Hlv=_U?pg{nBv
zMA>yby-Gy~NZO0KXoXKD&h(xrFk42ECJw!fy_;oy3IG8or8t`$RE~N;x+vI<iZpAo
z=<tU~VgyWDcQ_!N-p25*RH(Eadjs=iM>HIEX?kH46yPR;2GEp<jlepvSGRI9%Z`pC
zdr^z1lMe9x`-*+trE3X(t``ct=&_a_@+BSbIysUeM1}NPLE{9ydd>uIv*wFT$r3iV
z4bL64F??j~-!j@5U|u>|#LUxa5J4`cmv}7!f+r>>B3&YDatCc&cVjO2lr?&W#s3!B
zRi=pT38r8V1%DZgy|)d2cennwHU73$p%`~(ss7W{m*=l*o%@HK*WaDr-h4Ov;=BY!
zA^!JZu`{*zwzbn)oO-vg&5pc#&^{Y4P{|1P;r!XF^H*!E;k$=3MAyWtN!<LpQ$Oh3
zTI?LocFHTA`L*{GuR8}XJLM<86w3d~v3155x(3EB*;&2A#zuJt@5iU!zFOeJt4*-6
zb#7MQzMOvd)sxQbUe_ScZ#6pKP4J;MpE5n?<(F2~3};$_%~-Yh)G_84jxql#vk=sQ
zY|aM2OQP^q=B=x-fc#Fq?V_nM^NXZ>>ukVs7>+gr$-EZRMs)eBVU|WLnuuxgP4gKu
znn@sm7F0o(Y!2nD*^(;Baf)Huo4EnQkk76dilyY+jB4o-T^p5d$QCv^T}?(!Eti*n
z1YND8LwWg!F~Qbo*?>H5uJj!>86sl_%0ZeMut<k!n~DzQ<ez=k?k_nbKN!!b0r}K=
zvXlnnhGB+ygdw|TKsB`}Pge@9fNIZ#Xq-VK5#?!0>?9M0HF-E?S+?Sjl9F(fk<F)b
zdP@FP0&UqNp3~Dq?3I#dFMl;3(~VGi;GL3DGa(bkqNBpzhDuu-WOka_D?O>_0<s`>
zDTt-KD((9><oIqgYUZ-~kRD|>F*$Z4+-<aU<>L9&Kw8z4Aty7b4}4tKUMy{zA%4^j
z4MSR}-@?b`{l&D5E^lI>8U38qSPY9z$twWppBaT{T1u671`<OVc{`j)=>a(@TnUb8
z(@2K+jb-!6Fb}n<rnReREPUIh)r=Vr;b$aFGp?G6jB19sm*$u`lvjgNggRTo3oy)6
zKbqBZ@^{2Y<dgr$-uHdAab@ZLE4AjOyVtcYou1okrXTKID?Lfi^qNjOx6|F@H7~OY
zsSFiMDpHal_z8)hHg=rEma!citXP1+AcU|$wlO>?em4)9m3bo<gYAG<{|oo*z0WyS
zC6!b^z=^vjD~^z=>YVy@s?Ofu-uwI1;CICMXs}01hMMsEdX1!!Wb<b9td@*Lde|FJ
z10yGRNA$Fjm!c)v;9zs$V7k|@5Us5%Q;YSg=?K05L)mF;`ZJ9T^K{ry3o+JG1DY1b
z3g=9EWP?Z8(%>WDBRO-2@ghD-Yo^b4B|FHN&M5#Dwm8E)h}8f7yFMLK>hXs1Z;=mA
zplmv!#dOt;5bz!5v+B53;k4QQXaQwOO&Loa=0({<*@f-jH#=ZiR8vr0$u=(8hLR2V
z<P|cs11x9ai;C>KSJ5(^XJY|QwBILhpzY!`@!)&aBnu|{gy_IX5nry48scU)`h@Ms
z{S^-r?|YT!KLeXo89xh{oxDOh%N7rOaKgbdW;@7?g))8?Fq2_L=6pB^q`eA+C1tz3
zVrtxzaZXYa4u65Eo!PzN0w!JF!Awj$zq)7DFr^Y!F$-=66W+=wxmVz6ewYX8pgH$L
zT6nlH^m4LtxKcm7dKL4>I9Yk1s$t5bl4(pcoQfRQQMOV+<Yu*D5BEiCXuqj&BPm6h
z?s<rtYUQJpF&TvtB+8gTQNZtG#M_MtAHX?OWMBMoAeJ$YVFm-|W&A8q(7l;o+pA((
z(rL?4*H{}+25f{q8J|>%f*Cue@s~zp8J^deCq-!+mg^^*NWpUv>0CCS)Y!^8TkNl3
znZjKZ0h<x=vLP80bfJ3oN3B?<APq(%hA_4<Bsg$ZC=60W_8SVIEDGXgZ_dT(=9AsI
z_T)Z93ha>pptSM40(Lsa;?4cV&z?TILJY}185yb7i@R5e>bPyqvTfe9Yw-jF|C=RU
z5{^mgc*Kis=PuH43D%+aoED_?tJ4fesSu$Gr)*j$!6v|C*V?qmngpL9!%Cbu=F2`}
z)C^u&vH!`Zd)ZI1`oLo<4M~1TGwhU|>oDMS8<5HnQ@L+0w4H&2r*6yn*exsAlJf91
zRiB8;2Tj{~p_Rk4ZG%taA+t=S21l?dK4k79*V6Dk^@o?%aM%JMt#NGnzBa}I7tjdS
zCE5!u^CjdtVG7k{BJgW6CaR%xpk&4oSBnT}SA<GuX;yUzZ*~OAk_cvm1P}4=uV5+~
zh$%FgCSil>vL|V-yf&fgn5p)JK%UP2y$e7c1nY={K@lMws6_zbcW7-;$$v_DPcu=w
zB?-}y&X5Np5L>%*H;6ICg{;Iu4uTskBH79|aIg50HUOSCBy1VR9zJ#_w!qH{aeHt{
zUcpDPf#1EL5DvF|4|@XJ_Rquz!OU1RZ%yM|s!`~THx>l5#Gn@n^UtQym=X6ZB5&C_
zF97BbS~8|5bd58n_LyvpLB&7i=ysI8Knv9x8^{eHm;zE(XG&Vt<MEC|hgiOA_L^{)
z>U2A0w=CRWrh0I1Xl4oR%1xTt+m=DkiTQrr3yV?3wR2uNp61>8%Ao+U352**@+<3v
zb?Qa;0(16@^UvolACwA?s#;zXD|Ly#z{=N-nj^%wJ%`1X1vD}X9coNDlXl)Fdbc9V
zJU2v%>6jPZ^WAFY@ZylY2ytZf@WQM&*i+>s*xYEGraHM!g?nSR&4tjMC`qF>UXYJq
z2B1+fO;+KyD2<x2D5qzsz@1lUP2onM7w3ijAer<q+I~fsS{y3J$#XJls_Nm~(^TWK
zfmWp-ObDtqph%-Y#Tkye4;$q(a@ggCJliyuTx7Azq6!rnrOx|dH|pO8W9kP<<MHQT
zjmrWCdMcQq=mR2pOG{~qJ}pTV3h(nbk$5%vQr6gFrpX56RD#8a8T6E{NI8}lA@Q=p
zzCvmx<lYl`vpwIwBOzWtvsBXnMeym9y|dw2W|OlQ<|J~OD>CWak=boej}|HG+>9*p
z+-*#l_CldBPj^hpdb#ETXDLrvW<FDbZP^bBHH8<g#;))-GoHt>e&7+asCOoK>A?fI
z@H^Lbrl1y~hWV`zFay-@B^H)oah}pK4~FKS4?psOOtBXGUrZh>UOgyGP<rD*exkXl
z-R`qbU;>{?8%8{ZC>2^s;x-4QvkPMj*#x0o4I`JB5iQChC0ACEXdRD$t;y&q3yv;s
z=!7+b+;p97eQx18NwYQ-O7r>giV56a$wncSHjNKR{3_K4HA)%Iy2bul2G%3~ZURE1
zKy_7Mb6Hkagu_gcKhn?v;IPMTkp+?50JH30G}BbkEF)FOl%@D=+AKEojGJw?fc#Rw
z$Lta4xvvRsgiiac=OblFJ-0XW0&BaFSvNyGN-id!1Fu9<BO-3%G)R_+vtUT7J0MF%
zsK?Bc?HESk1?@%zE}QMf7Ch74kaWS4h&N}NEK;^2AgY-*Gn`FE7q2atE#rV~+6+s;
zIh!rZAdsi4ooT~BU93$920A=~U+#uhf(c@ox~ML<*s3MLCPLgJ_kjdOu%|dsR3aK#
z#11NKtAMB~GjLyC&)aR>n+x2|<;bdXczNWz+EP$}wT5OehelmZ-)6ptcKEAgim2I)
z<f34hD7g|xxL~oJLEcxOBjaI@m$?v33*NgZ$h2hZGCs|9_ouUt8_pA(L0}D;jy8QW
zD(q_A&)E$8B2g3A5n>XL;X*Yf7nPadiZVXD0}d`r?_dgDgK1gM-3%@(q8ffKFwFd}
zYM%S*M(muf0{<o;&bl%nJd3D~fOCs?5gc;<4NkOLI!W`a4S|kXZU|1ZsU}AxC>C8{
zehGl3VEzI&+JZmVMS)ZY-vu^Wh4SE?i&#z8Y*GgSu6;Tq*;a77lW)?DY*TCMydv~$
zEKet}tW2|KQ^Rj7SSn~$Z@Qr4c26hQ77Hg|iT#+j7aRtcM0RQ;axG-6v9n7h*8}=Y
zA;glemYzQNnA{@rrJmg!A$N)}_5F?6{n3pu$hy^OZ5292M~mQfk50dDkUg16sL^!r
z%xGxmGZmMg#ozs(-hsH`vxG*riR<0HnI0`2?>^%*XV|rA1TFbv#s#z<H98o{Bq6=K
zDJp=gls~=u+tbQF8HqkMElVD}q5^rIqX@no^Ks{FY}eb=A^E?|shmLa@o|OIwgVwI
zY~>59uW|}+i&?rZb`Y`qnX_UC;4%B&4N^?o&YMzE&H<^ksoJr6+@WXMq^+k$F^$a0
zjbc2_Zd;O$n0t<%O3b!<5mRG8%b~zsj3IpHbtvEg+HWR1jo!wB317S$lLuGby*mml
zx8S!8@w*A%v*>!|a8;3*Y`tOnMdSk|;;bTAV$>hlcUi%ZHHE9dtn&id9cOl%`hx2N
z!cycQnOanffhXk!x$g}x?d6t`QF0ULCb6%l4?a7%{5gA_dSmR1+!2v9g5Zec`RO_6
z4!4k*aho^@y}<Ot+Y{fdR+yhbJ-xhMe|h6Nb2R**=>U&Ukr$z$g`N}ib!5!f31DY7
z`~`FovNr#qqlU6D)FJt2iY#>NJ{Io_54z@k6UmxhMB}!CpLf6(rp|zTDvM@t4*8*g
zNXicg4wfyEFx823i1!D~MJ2Ro^C)uY6Yu`_9wWx#?sIHFodfsVJf}X+4JNo{swF%h
zC2o#7pJir*stUe#nH3B@{7+BxyMU4RP2jEJ0eXRX?_`yoVO&~4OSB3GBw8msBRj*)
zz<jVX1~SyR$m?*0{aC&yh*ws)bx4;Ics%ad3`Z$XxWM^gRyyVy3sVp5uOcyKWQF;b
zxARtztyV!ojYVMz7CjN{0z2VNW^84jJc3V(32XqpPDNZHpg0gT4OJv~$g)lST^*qZ
zxOtn|TXaSCM>qG&_dVVe+_17ww9h`TyR@?#RgS3`rX=Q{BnZAv=<GWhZ)Z#+p|B-T
z-livelmR{6tHf>lfjVc?NFuF!V4&N_^xleQ+k*dFLdybvEQv4Egl;d<`pdg>?x1+l
zT^@!e`!9s@2bN2~VO1oCK0|FC^FIk(EaJu_3Nw#Dc14_z{ZOSq2%ZD<84PUqFj;U<
zb9lBG5@2VmknKV$84{Wwx54ICpkNds_U5bS?2me}K>IcqO{kRc>*j7cwYhX$sbxsY
zmDsM^T)6AzZSDPd>Dot0=mjxIdDQTNi0YvKWHt)yOG>j0+D8f9Y0?H2CUIK0DgJ6=
zZ#;~T#B@!0N4}u?+5bjNqs4EU-J0%Z68|aDqjiT-58-D`AWzh3FeAEySHe^bt}DX6
zf}IFK783+N$e>OMi#lM4<vnrc&ASJ48-pqkYLRbi*b2llPANu)R<ncZ$MA}&sQp?L
zH|KBcq()L4!fFq5jAX}?Zbez>T+E7)3<OnXL8gJPC{kF3HoQq$<jv;3Jw->VIPjG6
z-Nh@UDltnk7z(-&<J^T<6i+bE2cn=(L8Sdy;t*S0=wtDY6o=TtqaK$+5@m--M@8sb
z@Vxd=>JjbylbTqv#kC`fcgSlQmuVptDFea7Mg%d>Dl*~tys(UA7^x<dBuZ}4CU>XE
z7KXCaC+LXFL9*vnYv8s#H)%DkS#J;+zi~xPbn7H?)2pEbuxjdbc?{e%Y8ZW*$)7AX
zm;<SAQ_2(_4Mx49PS*}1U2*;xL+lGpX9E}0UXia->51#&IQg-$u9!EdH@kxYMHlSe
zAvfHJWhq-Gh9;0j$X;PT#q!C$+<Evp|L}R?A_T{BlY7~jy<8TW2)7c&43>pD!|kBa
zv(7%JTvEp`GK7{RntZ8cKq0jiv1yeE`bQ!3R;|&uIo#Gx*gr*j0z{)<Weus2VGAo4
zs;(UWdHlJC`&=Wwf2ou}y-(RV`^!!01VICI(r-SRzI`}({^&wcVV?7M)kjCun}>zz
z!}Xz;BbB3D*OkNhiKAQhfN%fygygwD%w0dF9A3V0^u?!^&wX@j1X%NdAfJ{gFRzZU
zzL2ZG6g1N3+du!<meN)ek+wUWBp>=HiO}G-ol(7W{BPSC(UWxXMG*nCSn8{sn;&*{
zLol~Hc2&ZX4iWil*PktHn8$Ni!hF9~+tDxK=Nf)m%WWq^K}L&DzKk8`j}vW_AjYh~
z2|#qGBoGYi#g0VqT$x9bqq9$nJ>qM8HYb9MJkq!&*yDxTk03VB<4c~?!E@ZibtbV$
z-Dx_xO__);I7@SL!OdO?HZKq)L%iR!oMD{vj7$FVJ1!wkDtWnteBfJ3dr)T)d)USp
zDr>zi1E1RzO>L74DI4+5Z9A7R5pkFkQdE>jL1~s#;>#8$RGvUQbrGQ?0-EkXRF$Jl
z<49gz!{y_+K^eKVbGs$x9|#t3MZ{evIB7WWUe72Z+M2^KEQD6e6p*b@xRpm}-H|M!
zHm?vFxj3|7E(8Ex(uFw3kuM0xk||&es7YN_&MEOsH|it~wqRnqy-tX`7+N9|)Mn*?
zSqH;jvsw0y`Nkc@!<j{$CjM(R83UUn!JMBshuGM9%ZLehJht3Z5%=5NnS|(_D<EeJ
z3U0EcL^(aizSR}vWv*Mi&^hd;9e4+HqB}##vWYy-Hi#;~(L+wF-CxtByT+WgbwZMu
zl>|kxW@DB)M&yeyKMe|9wHWwmF_RpRNJ5W)&{)^ZfQ3{TIIiuM?+6U>CK5E?b8-}C
zv5aa#(h^g^dJ$JtL}VGl(ZmW(O{<bx`ZF6AxowMKFVbpPTI8k(c%HVC@$4e@Z_zuC
zixr#^Ie2-O1%1+$fY|mH2Cla_C2r-8OyU((xjCx!QUsT-U?2uE;SHkub%}~sas3>X
zht+r(oTB8bllG|jP27-x*cq3O@^}P`wjP%7lURCZ1cxy{rb#D5a#>=!$Bwjbv`bFT
zT9_}##IIS8;LbJi2vP{%4GX4Ne2vmL@TsovoTocPPpU<;n<EZiOfMH?zpZWQki*9G
zpFaHf*~9sR^W0y4_VEhyE1uSupKoWGQ*khH?fKB?{^b?g`1#5#Z2+Zk2t|jdsf8Z5
z#-3eWbpXiZ*Sr|2GI!=hc7q+lUp6nYqX24N^nhHpKDd!%$0H%`lVOofo0;%Q0|xF=
zWs{tmN%#7U{acNY95vn-xZN4{X7x+s)Z8ipXBxcm*;H@PCW#Pjk}xNmnqMw7tDzZu
zU#v;Ui($fW34Vy!D(o%?dVMloT5Zy5+$V9GvrWiDOh^0vW587V{IhD>+G_EnUpl(x
zR$&kKOxh>auGQuvo$W91<2u;attUgYg7Z3&^qh9NIj5C>V+Upo@T!9oP189o#ouXR
zVRwGgy{_dXb9dw!b*?*kg?T-Xny>1yGJ8%(sIRcuJvRLu_O%}hL?Ct5ne-Dr<0qDp
zwH@-bgJuA=!R}qercW$M=(JR(%OQuyB&CKomus}*<v}xu#I=`0;)Jq^PAgpUc6e=X
z_&k2M3NDVHWh4$a<sl$@0%=I{%2>4z$4E;0xC!g5$pQo}5L%n`C;>i!)Fta3)KYA4
zVi`@RrFPyO1fmMN%aiV<18vc)aH&tHrF>iklm91OB;j6RUX-7gOuO7DP3A_i*WUY0
zPo{UFaIjRV8ftUd6uGw80zXIJK2@rDwm#Xcsnb%Y2ay5v=3SEIf3PGf`^EHiA)Q%O
z)+<e>wTH;DlDB}TvA_CktLB^j>JqO48Emb2Rqi}39aFXPyc0N<D%oN@iKa@Hm`K&a
zUAk;-*=+XRICHGR7dQJbGgCii4uwj(`tkamZ041n(=yTg&iFy|u!IAX;vUSa&&gsv
zK;T9>lSxLwm8Jv|n-?Urv|jhTtd^DXvH5<IwPs#HvesWurk3p;^<%nLn{+K$!5OTq
zx6bR6z9mz)@Hm@&@mG+_WsMoTL;f1)cG*4s9}~PBh(t;xcy&nXQjUhRFR#~mYS*hw
z=sN9=W=N!?EvE5fy4UxX?$zR;zsk%nK4&=7zKUW8TmLOmz}lc-f4eDRw%7HKd13B(
zVd4_K;@mKo#4y*iFgK|2uQyFB#Kk?SR56hy#ylrzm`s880>Q%RK%Sg4ckHwx7)I`J
z{%w$%&s*tS#BU-v7G;8&cV5|wdq!EwNo`UTiuD#<N|>CpFvKWra9>$0+bN4o`61HC
z{@W*yxnkfya5k9@SpObV%2r$OagNASTA3}O?D@p_{^<I_C(AEJFTKc4K7BMTbI4u?
z8FD)_%jrZWIGxLBpGhpTO~Od)X9BkQ$S`{%AHDHL#J>h{YuhtMGM$RN@$;X@`g|%M
zw(H}!@BjWE|M8c<io9_my(RQiPzIi+9{$MIz7aT=$l1p~{L=S{wCy80XzKB}?(=Ng
zcB7?xb))at-B@?&B+gCt_P+w%bU*!_=;8hHM7#UN$)4;lPW0e@b+QNdt9Az$DC5mg
zV-aSS0|TwulJ>?{D!-O1z4F_t7{w_lyK~E&FhUX!^LSR?TYAFkWXTo<t72@50<F|%
zN4ZE_JnaN}gXq#9jB|10@s^fsEGy?4h4fdSvmaimYQIxc>3+>jYkIu%^@DiL+aG3_
zz29xBF~dZUN;P{rGM+<-rE#9O(s(q5xP?1rXUL?o$=$^Q#S6+9@tB0AiyiL~VgFTx
z(hG_}+lz?2R}pf_3-rTJ`j_tk2FS7RjS0aebhJV^YF)@DOd^_E60Ej>@p>j-6{Zib
zOe=?#`r%CFXwqMr)g<DL0M`-jR&@v?2#C;EsuaJ}OS`k90zu2+mG5}u1R^aHbVG=a
zOm)icF0HYj-Wg3*#4Om}OG}8AR0U#E-z|0AJKz<6Xsit+jRE!Xrk2z-#psUeUyb4q
zwIk6IhZp#r7LUU}c|8v3`rih}0C_&abO)YCbjP366i`qCKZ1E08(`*p=M<5slyE9n
zCYUdIS$LnTKEx__E6Y6d2Yf3|eVG$W$@c=UP5CK0c$_U$LFbH!F~`Uu#D>f<yYw^9
zq@ml)UD+V&Q*eyvtgGz*N9f+H3zy4BBGB`DGtVDA+P}Z{yinM`(Q&YFH{*Hr_Vdm2
z?k(&N13t(!=Kzjj*R4!yX~^mUtmw=cJsLP;hbZr7AkW64EXpgmNG7@5#dL?fizYzz
zC!OXm7kPkiSsvmj=dK`4(w4}zkOw-~bf7#9n{p|*+<`+hV43&Qa=<0*q9SrHXrnv!
z#BPUTfPJD|5@}4c&@PNzfY1{)-6l_^Ur9BVAI}>7jRj1|=r_(~0_I-0DT>-AU04Pg
z7w>B6a|4=rmMuQ3^_t=IMj;b|Pqw-tCujmX(`KW<hO0pW0<pk(63xN@O8h5{1z`_x
zsQ^B+3zYzTMD(a)HWpHv8sx&!{9nQ}gBhMX0NDU+^g#*(<tJ<b{Xe7~<eSqQnBj1q
z42LD`qRNE>P#gf|O(E~$c*3S+R!n0orK*US53?&KkYaqBnHg>w0PXFP6I42d*m$>&
zFE3T7Q^jF0!2O4ax*iv=F9Txcrj`yO+dwh$OMAn^$nd`c8SJ?g{Jwy40C!+j4p5@K
zY#Cm;mp1k%#t!m#PCy=Yw>phBQ59MGbGy}fvYptsn`A7>-*X~=y;%~#kKILqY6^&B
zpju|ayJ!UjjZBXeUtXvmeSYy}c1bzPEgjulIGj0uH1&xBssZ_XO0zwOVQ3T$c$r@~
zoSZnEECvZ|qQzIjc<rhTCqrc25ek;!UoZ)0Y%$flj_9yk_}1H++@^5wiKw{}BR{6!
z*TmzJw>^UJNb*OpRRo}uwMv24spAX3q9HpDI<FASq_8CL^{=3<TND)jTNJ!>#3ehL
z!zUpUi}Ak+Wcr`MU}I&s#Ez{rCL)g%Mx3xhhRek6FRA>C7(c8EcK;KRk~Qs?xazRP
zk?7ydM0+zzO4p*gF`)Gr$C8%iV1j%%I_F6>nbOl@o(%F{Y$H&QfdLnbA0h`$7Qcun
zAhr)N0T2OLgGfLQdl^Aa_#l&&tUaJ`*aQW!79Alm9=EDkL8>;C|5>U)z#pJ*TN0!P
z6WApNv>@D;wS~K?E{XtwBU>eOG-lAzsF9-~BS$l~K+s*u59<=4a`jm#NH#?++6n?V
zd>;|Os%!_3HaW@vD-_|ec|$wHZT%r!p+GC#;-P}EVi;5FJJ*88d>(!J?BVo5E&F2p
z^0S*Gc26q2((beM>hp~it5ch^^_q$9>V5qwHI@h%BRH#ReQI3q*8*4m_NvT#_-OEW
zKxjhM<FYc^34^{JftAYpe5Dr*Ivx?Fv+Q40IBVQ#<esH3@Uo?&y3JQy_J>=(XFM*X
zjehXFQl7^TF4fzSci>8RpDg-DFjNFyX_%&FL^^_^;O$_~8QvsgnmRL?8w+tQnl_E3
z{$a+4lr6h3AYKN}2jQ3Gz<?*?4PH)&T(T;eA{CLr)v+zQ0C-Il4Tpnw32K3hbAegT
zzI?(yy-`cDgh#o_jW+b+?ph8F>Iq>YPoeGI5vT=A7;-Kbt{&amRE|cb533{Iz~zKU
za$IT3Ijq}E*;)Yjvfago&-=a*%yCR=W1m&B&&b|mhFoX-5pA)F2<C*XVj32iSYS-m
ze1MniUJtN)-DgD4rHz4(#r?trvONp*5cw#PggFK)_mrJWXu3SW8Egw?B`kr2*=uJU
zvXA@#mLmBEC5FDhFE^!?z-)|hG!dvqGLQC#53y5r6(I>^jHYFruZbHTW@<`>={OXr
zn$dQu+)8ttDkOHmse)!~M{HK6Rn+#EKY5WWKYh4<FnwwNf=`U{#pe&9-EfVyd%F1m
znsz`c-;Bp0QzP9gR7w?QyQ<N&qNWs8dH<b1zNfHhuZ!LE-@fzad;HD7U$g<SOYCfS
zZVV@iY!|Q6!fyRdDGL715h_R(TxG-yw-Gp;5nD-s4=~dEa%oe!-~>%Kw0;Iw2Q3EV
zCv+Km%yl`6&)S`q%LJ$3@21m&_0BV!t29BE_vKP7WwWW?ggz&nmjBjH3yg$6C5*>g
zk0&x9=xSshZ#9<t)inG1)uqOATK(#h@_~BR;G^5<Z#?27oIe0vNv+$!a}S8#zHKHN
z>pgKj+M8xC%>+*aKcz4`6uXWn8F;v$Tn>7VI7}1eTn0+kdUPn83tAednF*bFtEyGy
z!XD}j7T+WcwpTnse3|1Z4t5#V5}T<L^rR8&W&76H3&h3*68nkTqniK#r`2RyVI~k=
zU^0>tS}Fxbt*+6*Xz4B^6LimDH_^z2Vl%*gyBuX_G;%u`RtgonA;6n=tt}G;Y)!>*
zO_H;?b15-f!);4Y?}nvmV^}LU9=0gWg+GEl4U=@?P!hJ%#p}!Hj5}C^7QkG=@`6Q0
z79m(t^s+#pBx>85An8$Dl|{s>2y7RiU5lLrEI#`!n~DHlW&@Xx+(B8P3B$uVbkj6Y
zwMd_lO7odY>k9moVqF;ySm)EiE6fZtt(qzPu8Cbs1DQ{WY^SjIpa{6F9BeOpkxpPQ
zy9IS#*%j)VrgB$1doNqs%TDcO=MENTo)14lH2e~jiRC8tvSWMMySR2M97Nuh;f1QQ
zKI8B>`$5AFWN;B7Ut`m{tox$z*WS3t?m4ioxGC#8s>#Or9<l!tV(N&yTM&B@n-+1q
zv4D^|p2y+!PYy3!?K-+KM8|$~d*tYgPmXR@UVdEX4#r_&+Tx{Q-UqMqdYQj|c$uxd
zrM#S)IJ&tI7+wF2NM`u;^iw58SJ$^cvx=y?M{$;G$(X&w3xWW&-F@tDrETF?EeJ|S
zo|G(trBb9SQdxsouXwrDx|-{o#49f1;e;td*7rX2E=sA|6++o6rWDlQ&NppRWN+NS
zkbW~056bfbWIwgLJPBAo)C67T=el$m%Z`}!wKR$)QdomTr@O@Fuy!7!i~ua*(sB3N
zCdH$;X%r7Ni$zPIiKd{M@U~bXVhX`S2hF@dB`6|VsPL19;-cd20-LyHEfSqEy18X6
zRH{0BTtx&Oa=WFe&V@^5&=4-o$yIYywsaqY89~DiU`=ku^#NVwbCfZlE1)#|IHy7)
z5E_#yr+-c7sCPmAQouP%R3mYl4y`9))P11xX)z(Ez>LF^)8tGQCKN$B!Vh;YLqNev
zUsQ9*zM+>}M{l?Kq(W3aRfL<N3W94E*QB$LoAKC0)^ePm@S(s<lL8(!6c+uiF`EX>
zdQk@ih6B4x2Jqx?liP^)c2P|c+T0o#5P=<9!XujWj(HB%XH?C@M>T*JR!S8?5?wBZ
zKs40DKpcWox=B?Tf+<vSw7_@FQ@vzphD4V`@k>+egD9Y8sDy@}FZ&KrlWi)}>2j3z
z&^flpTX<-O$7rY$2qG@t1wf;s{HTltN)(s5$yNV?>o4DWQh9QZjoqt`F%LC68_3+X
z29cI+UbFKt4S6W48CEEy0udUvb<^1PgoWr5?p*8Q6+8hS0FE5(k7l2j>-&{4BDQ+^
zX!>dW0d>9q*~PzXPEaAvUpCL<{cr`EI59yORcE&M(kd0-Jpb}8b@X&=4C-uh{4kzR
zjCZkvuv@uJfYobKgA+6QwW#5fhE}$MH_92%KMvF?)d!`fDfUlK2Ih?(G&8A)o=}5R
zN86gV(|_*V0l!9(#9-<}pAy-vjr)3z(^&7(jAXA7*8($Av&k``2EN=WEg7Q_Nw{{A
zs7t+7z*t`LjHxEHh#HL|Ikq#P_l{Vvyvf&l#q=bcSBgMdCKssW<Zdp2s?^)gB5Joy
z$(P>M53T!|Yu%3x2ec(}2H6!4Fn%w4!zkqA8;#{uI@70|W!4!IZ|SMC03{}DFyrGD
zPx4=R=OX`}>0wK~0+e`v4MkNFsu?vp-e>^l*yfE=N@?6y;y*FWn4VPAfWk$!WZG0a
z#&akzN!S^tLoVLssgy`0W#@pZ;i7f#NM#xG9ZQgJ#nT_h336HlR}DZV_U~rLsHMs;
z0bT=uY%*yG()&3hz<xwq`9S%Jdgh9IK(m_%rXK)gZtvy3gcQcy!@b;n?+9oav5=CW
zr@1TmYsQJQMq(jVgf;SjmiQphg*a#(=w)tC9DOmO99^p&jnt1Wbfn=V@WBos$fUcw
zf+0n&0`8bG8=dZ$fC}K#D^O~^kQ{7RaKAf_6T{gSJkVuyMW;9wh)87fj+K%FS=ue#
z6|5I!;;pz332ERsb{uKo_`Ss@_OEXA0fz&57>%_7c*$q{jUVpFy-7okJ%jJ~X@$<%
zRbDkd{A8PXRZug2>B;>kkC+Pv_U3)|d$>~{;2JeX(`SuDDh+%IDfWn9GE3(S2w5hQ
zb$SgeB4Mk7?ns$@lTpkO!%fiz@d~`!?khL~TL4c$u)j8G?lJN$JcF)|%W{ud%;1<d
zU6^Z6s(x5u>kkhKV+UWXKQC{;m@4}Sv|5M1zkGfF^0l@x);}6XoETndvWAinIbQfX
z3znJeMV>L)7J-XGI!@m1VNzKevW*eDZBZ>)QmhgSNt}vAmK}_Mxhzn)!i-tv<gmwJ
zSnW7xw~ZponcL4P%{85py9EHzM`8t?J`UsQ-^lrZmGf?+kV>1z2gC)~XW2XoGjzbr
z(b7ra8$B0e8!=<YX$eJp9i~vgSSo*tka1K?D$F`E^)qJ_HFL&#jYj^UMSGK)$^NG3
zSqd6B{TIz-9RXF3nxc$ecxE9$tFq@anlV#`WCLQBMpSRCB{7D{tgl|+In<L;y-$q?
zjG6u(Juj+!VLNOr;2tv{!8?J!6cl!jhM7>)kU^HB5}H;EE^w7hCxb4Jt1k)5ZUt#S
zXY3U)6!t8Nc9WvYwnX%yE=Hjvd}`m2NImB<d>RB@mzLzXt0kGoXu%=}^=FY@&6%m6
z6SQaZZD=>Iib9<rnbTmH{wb{`Q+gaqnoL6uB-<^;x|$Z7HDLG>9t`5Y{D~?o;doqA
zOg&|oDGfrIg0WXnW3t`K2`F&oj;sB<hD)yQ4bS3|F)l>`<dlnhxkp&zb05NzHcPpQ
z*m7@pgyNh*!<F9lw1_ya4N>(?U9eu3aN9D5D64|uk`|UI+Sp`@Q%tgji&8`>m+%Kh
zEk#6>NGi|c?()b@c_%a_WwVX{@ZQTSrI!~+6^K)Q{t4Fk2)qAf{`z<IilCo7ygc$<
zZ7FD2GJt20$E`03YDE$orZS)zd;QL;h%rv?-n%E<2M6aO76jc}=H0MfdE~MqxumU{
z=sXq)<+1$ejzuFGn$pqTNwsq{=EZOMn#s1jW-Nl<B6DeFjM2p%5)1=FvgpWqJ+UnU
z1|AbO=V=OA5=#{EJ1-)Ubr)vB<*d7}<rO)s<BxgV$qCRMy1Q%?<y*v9;suN)lK9A!
zhz&wXe^C+DNyXjL9V-qv!+8sX#{Aw@&2wMffFK`uf0=WGsk1t962{0W%)s(<YRc#a
zr_kt*>(O&sQa1($ncd^`5rfD6euHL-!zzKKDN|wAh>KYw)oR1+k`i~R+=vJ6D(GAZ
zPYdfvlqc1l$(?Zsjvzf51V|q4Tx6ct<daQ|f&k|k2zCIkn2W<2s16DI*FS6K+4Nu!
zkp!ygw8FgEgsL=V(=es7O8}LrcF*>B;XE0+qnKR|BXu`iX#iauWJLtqW;ltURU9?p
zbd}rF{@Gapx5M)K*`9yNl1B<Hpw=?eRKirlC|kuY4BOT@SK7tmNDgKmyAm3+!9#E@
z+<KfX&o&St2!~Gp){|8+6e$`<LlIf?FuImQ;z~}vhf+KcltXQq2eehkF4$m2BFsn1
zPu&5Ap9PQ75zb(P9SYCNQwI+|0XDpi+5ORtP{B7uB5dfwi|i!p_W9blub#i(;95HQ
z%U}HR=Rt|q{{9#L@bgGo`zUBojv9XSh6^oQu*i$4tFi9ZfD2pv)~}Vf)U?{IGOOEP
zEtO~n;yU1x&q%HgL=0|*uC20n%&9nJ$MtZ9R(NWS!Z~|>G~bPPP(xlFE!nLm&xWYJ
z!mA3ZUF_wa^0VWnQY_itNG2N#ny)UvMV40*lqrZ%VoNYX+zMKpB|9q?(59Qg&m}bc
z>LIJOVq(boI3e)h#4+p*kE2_*y*GS=`)2_Px#XDuPl8OnluQaM$0_R)eIH(`9NoJ2
za-?!ty$vM$7mB{?yvk_)Z=(mX1>6FH4qic`fpJ@c;})#2oEeKgKSVuPfcxvWKumTC
zVUAf5@*peQD)skYG$#8i6)<&f20xe3x~_+*AnR7o4YEO)1UuKq5OgUf`?Z=RW|_Ct
zn7?PcrMS#To3#DpZRO;)YV)<wMP#yLAU`G&;w`8d2m7dg>~gYIW{zx-4OHPaiH{TJ
zhK4w_gM8)SQhEQul_1q*qA^$y*KX`Txa(a~_QziNDpKD6zrVBIPc&zof`T$dO{I*e
z&ZYy9YQniUXw<{Li(u2|DXR)iO2(~V^de8<cZwjM%8Q<+j}g;w%*Wzrea^|JtU@e#
zsOz`^1zI*pg-^<i5&I(@_A~|uL`)_q)Xygu050WCWRt&jYnkJ##vd1;?ApjMGf?}W
zu^eT;y484N`KwE32Ls~!Z&~I9?y=ccfKn-t=V4BFEUhW;sb_=H4j}_&+>smWYEt0N
z2id6nFrxuqzlsAj7D%ng%k+R<h}~&vzONL2C>jev)w~>o%a56I%Bqr5gpgNWh<>eb
z(km==TEriD{G1LB%sY+@gq6oVY#v_2B$4?gtR`YB=jEb2ab5*wx8lZ<7H=__@aRC$
zAsX$~(rI13oPsJp+578kUF~H{FRp$0V(8A^@TbUjwTj!+L$X>qE^`Qj?3AO1M*`%W
zAq(;DqEuR1!430>uTLYu9S+U5A9Pzw4~lWVyEk-ryQI9lSbCW&9?cI0uCB2ggAPmZ
zV>0)U%PMcftv|L`(u6S=R}quWQ7I1r)Jp=mp2v1K@%v3;w6X)-Y+uthL>#&a2w68j
zbUqDHHVBGiSbei?fBbTYJOcyPF`htoLzWX-wJ~H*S!FP^Y8_XtA)35_-)Dr{4mXMO
zSQRsU0@$<T8hTpFr>x=?NprP?pYw=P7ctR@1hTPQr3-&gAeKE2<t{<tXcybtKB?$R
zpE00+HF{1>!i(*!uBC&_$J|E$eba2ot@IdL=`n-$rqES7MQ>1prpyp?CYbcBpNKtY
zUY^fR7Wj*5h|-NBMHVF31JaCaMj&zlXyGQHeV}nsqY{&V2(aU(2lWu#D?PBEl?#Xf
zSEOhQ!om{*CWh67125XP2nY8%PQj-ya;mv#LlXHe;pZF)u_!iiZ-Rqa7t_0|L*ejU
zF9*jyoPo*;f!rt6|2K4F=Q8T7Ji>Ae<;i_fNMy;KZ0&r^M*>wSg^@L}U&&V2Y5($t
z{n54O*{eWQ^=Rek!}a}(qhSzR`)*pDtmhNs&pxj|&#vsxPO*KwHUwB41(kGCL9_0u
zb+?v`_J(cDUNkNca0n<j5{*N?k0+x0PkgguBf5Fvz-^&(M@d~XCjjc&J7%8XrcDH(
zWA`oS?pZw5@aV4hr2=n^w$eg_yVU+9vE8L6r-{UjtK2Q(6u=!Xr5})&%89GElq)`Z
zLA~NsPY~)|0lM8>5fXABb!XxP0N*?=t)hRymd>!B$AEfYX73%|+Jqn1>Mt*Jq)a*?
z%s0<#?qR7ALG_c`?*DdQ_P*UdP31f;t>W*?LhiC-$9}-yJmyJ|I3&nMdO^VdO5-vd
z1Z;c$Ln6R8F@jqIuFDfdfGLE(ChWU~lYxIJd|pLOyd3e4dV;|0{C2MGT*dP{VGww~
z{N=&K#<ROCVL)JPyx!c{7YIy~?e`1<yU)0<D-PUg8vnuYVE&}P4tOx13{M6Rj(Qj&
zCj|$0Fjjs5G}vy~NNt0e*9sPPv65aHI4p0@4+su7+hacrMC@Y1y^i2;lWq9df)cwL
zn6Ecb>|(vX20*d9_50c*#?9vS_m3O980@bCH+IabuCVb5qsC7jw7LSX7i#R{Rs2;!
z#xDNL4+9+AoT8oV&P5VJ80UJ~b&@&wYr&JH7xwhw?5l;8rOWo3L&`z5tATVT5@n{e
z7G?cJj97P2>E?F^;RVt+nC>+K6T!sw?pU`TiFL=q(cd&5{s(Hm8nll-&5TFh`t@58
zJdtBU;_Qm(2`#3pEpWJPO$P*=P2c}D@y*&d^WV&Uv!VRg=r@bs-2Y~U{kPzUf@Y4S
zYcHWsyp2n?(XHPm89_hvGR{6CK|7H}+sEq%unBR+KInMKqN>=mQS!LDQQ2!_!E00Y
zM7^1a_Y9sf2a`Q&rXz;0+78xJ^4H%$(31c0&%gG)amx3W8NXYe(S2`e`o5~7WEvy^
zFNsp<GJY1226~0L$9`yUfZY@soT19EjGqOxomV_Gg?mlk1FJXV`@DMDX!&~N>-vA@
zzFzuz`|G=3-}`#@>mvJG=opxY`Mt#8WBc2ZN{qh%w&UR}G9QmaDRJ)0j)4T$=-c7X
zza75w?eNWShrj%G_&WZ)`t9%){Qij_QXcSnFM-Po9&H(Gg$wu{d|Ge%ouQYLmBW?#
z;nk~0*Y0)NJl^m77D~r-P2?8YD{9NTwISiELde5gti(3(V6c_(vw*|TYv2)y?Pb9y
zJ5OGc@TQ<7v@X4~GT(%IZXL-~M4=}*Ml-t$Ls;}V1P%{;cBDN5f5zXa;(iH#8kRk_
z#?Znvv-BSBTtt&{6P4HMPdaAAr{K|EU*Fvr+1<FR?5@{$H!kn4S9Uipz>Qmf(6MbC
z${WAB)Msk3R9fv%8mW%a)6|E4&pg%dcS|YtCTK~a<Z&i|iVCy&?_;&!8hQm=VE=Qg
zpZ&~r?4Xx*P%rG(S9j~PyY>8T{T}|T?cN{StzU&N^^R|VG$_T78)xGg<@aV<?Z~o*
zPDIP?-MQBJ_6lyz;?4cV&z?TI;v;NA+rGGawY~aECZ-!*{d!C@6s*@nY3QhtNHDt|
zMFdl4(?(wqhqzgpxErL1lV5#q>0Tro3yKOmR#XLd9T1qOgZ?wLjQmC(G!nDC75sQL
zisD2KIPJy*BvYZI8&vw)qsp!f7Cd8PW+Ll>k@vTg{kjPjHq=j3-*Cwr3%JWfCW%BV
z-5FD3e*^QU6n5{93Lca^ngo+5{ZMWqHYsx*2qaQrORrca!`;a)q(>RDFH)N5SnM*!
zf!R?AVJ8xTJenQno8=yeW8fAM4&){G2@#)=U}{b9vI9NJr<TPs%Z?xNNQdl;lxDsJ
zo7ieFaNZhYs_k)Mw=M%16&8K-=Oi$d$+$q?Cs2)eYA<_te{}W5&`qEt$t_~KHrXGG
zf7!geH+%(uE;^`Bq=0c)L1HO~P{fq%ib%{}YZ4z;ehLN&tRzi_ydXT8!A%zF=g$bB
z-$ILKOOTnY*u@rbADowD%rbJ;*lh&q2b<lqc0p<7Q^9@Kgj53@MNk@(OBLWJxln&O
z{3)mwAKltKtXx03ojtl$SB|b1!GyYA>^i(ZLf;h0ctV=TM_-JF6fs^zMv!fR%Fnd2
zgE*K*xZrT@f{+@nan%DS;m8%s2&=Jc$hNZEWi|FF+`DPB+~f|3AO&_4n3Kf)fo_#6
zrl-0r9%8OyAfMeUO0%MZEg@W*hF3a0%Yk(e3W&JZNz@p14;F3c`!**SLFt0JFeOXG
z@u;8_f<--o7A8m}v-R6uf~`bi`0Tis?sDsNnN;>HIpuKb1r<Rmg6ag4RHQCM2yT(8
zz?iUK&v!us%bq>2*ttxkesb{i@3fgYo6Z0c2)+{s@SQl2>T9gUlO%ck_;b^U83TIi
ztkRc>_vz_Oa3KHf6x$E(hRw>20=<4|QpWc$CnE`Arl5`e2{XdLuHI4rs;#nbpl`e-
zWsHY7NI#AMF5<fY(}LW<{V*cNMD9_>^ld`Z2{Ct(D_%s1y(s86_|h@7h>7W+J?Q}R
z57`%yw6Uc8C_AK$L$K@@B-sX$YY1Q-;B<Hm14joZ-5`s*jI172$%#+=`XvQz@!{Rd
zy0FNB3ZzT?u0SfdZG{3-$g~2W`m)%YT{}6*JB#OgTiOwT&@+R36_9)c*lR#2Bk#I^
z-Rp?QS3&j|#|6Rm8*It&lLrvEIi;WhzfGzWEj=`k8`e>fal=jigTloZx4vXX!LzHS
zXE#S)%&fim^3$&8^JDA}ad(6P>8aOepH=q19Nw?jW#0$+iLU)etNX*3_h%kFD}L;;
zhwmGuXZ@&=zIdibPo^U5y^_-K1>_I2Ztf=l-Mp=7tUvZY%z;Q6_#gu}?qP2<P;NKn
zNp!2R$Qe`fNgdy@s~*GfNxyIF<~>^XuX;Mt++*Orj&Vb*rjdjZ%fvNzsucgdguA%|
zMojmqs~t6xF`WlN6t*~{n_7ywW#S^fVY@|dguLD1_TfGbg2Z8xS{gi_$Qhm82%?&9
zQ>O4PZK?c*ZOQc|eO4-9P9dfr0;8`ilFuu^QVY=!ONkXJeJK=T5!Z$+Vz72QcZKIS
zT4nDoJFk20JZPbVEs-(RSbjW9bT&q&+c4=Xh*mCm^7Mzh?oDM6Qf*LEfIn96Cl)so
z6yiGvEO^G+yT5tgJ6=%|DS#G&fan5G{;Rr(Sca~B5Sc%O8&uJA;2C`Iqlhbu<V+l|
z6hezD@N@{-;+W1ZTiOSG|0Q(wCoI36IrBN<G`;H(Wei*)0U2-LV^=Yc4xDJni*pxK
z3|B32I09}ndBMudix*M(T*wgyFPJD{hT5vEbifR^nyr#qx~kv^-$l3Cp)lC07MKfN
zMj=q98M}AcJbv8ePlJ|Mpo-l%_?|Y05S>L%Wm#>N>kD0Sfhu<ssL?(n9X@gcDG`iI
z+Bt;rLe^oVbS<hI16q&JC&(y$as6y#Z6H`V#{1TL=}nVOdi|Kd^#?8HEmUAH$?$kP
zNxy6x*${bh6Z6C-?M{>AS6szS%0!?TqDBMh496&Q$?3~v$>Eu>E0MLRo1H)f$0nA%
zf;?Cayj}23Mwn-EpIx#ovA`xb<|7>O?9r#s9!?)z8-Mz6oz%X9)O9>tuRh;cIas`U
zP?#XWF7LvQx07l&Fj_Y$#;4ON(478%@~*9^jcZH)m43?$s(G23_j8$<lR7mysmkQM
zPBl^^w~@LPb;|^vxEdRbox}(NM>Zx1P!t0)n44|jLGio!2UKAUj&aRjn7!88dv~|g
zl6p5JbLyO`1WT>Gci(pJUTb~pTc53Q%LVIjGFwMk%Alo9f@B9!aHp65?#arM)F8Se
zWx`vC9e5)&F+4~X{xDq6NW)R8Xlqb${Z+4v1{{#pg%}bCJ_~4dp~3v2qZDQRp^QIS
zKg*bo;uZ5hgiRfl)mxqOO3l|K+BvVpKj&t{@1Nkb8+GSj&zU*<rWuf(@0B*_k8G{3
zzsRuW=IcczazSgMd1e%`1>EX3tyIBEYr3T|t978yV2;HX%8zFYS$XCm8A%YyW?Mla
zThC<mq+yYUg3YbjYfzA9tIa30%Ahg=tS{(HY5Dq(;BqPhci~nskzQtnXu-b3@0N)a
zQ$l=i5jSx)_?80jd%}pW5vnI-<gcXu9<Z&~eBWyUIIo16_lqaxSFM%XLyYlBvayPy
zB>7QL_80+z{gdT8*D37|H#Mo}H3xq&vspc@511C*Tbw;gtgg0t?wO2{MmcvKj)%XY
zz>~=6s{09%a1U`U;MY4GDg@@vPw(z8pYP0HuUb2IIwT{9xfVMJ*3EFW0WRG>!DZVC
zRYVu0jnI2YYZ6G1ydf#PzKwrZ8iW&egojhc0VV{Xd3CY&fERT6WbpFw^5yM&h$MQ_
z?4#O;ojK+U30?MLMHMVm^Nxak5U3kG;}jjP%Rb)FVIDWte7Itzy=xUN#9qR!rFJ?A
zF|^pjT`}Qzfr<e^fhAln_G!VH#06&wF~dd)gVy5iIKsNv%`$bmn$&u@cei))bXe%F
zPXQn3=@M<-N3^gkn`|N;$(mF&_z}hm(>iD==)Na*fjS93(S%<`JH3^Ei7i11!4poX
zpi2q<W(g(lN&$uR4QxI8>nWt??9}VwpU08|D3Z>7RbU|Hn6>cpT+-CCx`SJ;CXQ}<
z^5~ZFHG2&(|0JK)2f!-v870b^2#?=9166}JN9^x7b8?|bJjkLW{`4FeNXPd<?HmXv
zAdl`>j-CX`1T1YejLzR7DE4W`V~BAckly+6-Qy8OO@#jxS_o3`yq8&dy5(dFuCBRe
zCZk#PvU`h}d`?DB=S?de2RIEl=<{6j54Ki)$f*=KK(3M?IE723kBb3$?{M{UL29~Y
zo$~g9fQpnP7jg)V660$=auez)K<eWn*Ba-eatM|L63RX)1ixg$)s)HoQsmFshnL4A
zNP|Kla{t^L@uU%b;RjA~8bW3q1Rw4ag39?)F=WTT**KrBTx={;AmaVp=-I@RvpY44
zQGhbRa4bRbi}i;*IPqZP{ovBY_!h-2&PF%cpchMz&JWiAe(=$PYxk(bUdePS+5NRT
zc$hSMoVYNmam!2^iL9PuhIs;sKRMlvSdPm#PEXP{Vje;|Q(&uBo}EHZ!HEOFI=Q_4
zb-!{)(@8pJ!n91+ge@_TK9fo1boE<i#lB;#=f5WOWF{u`?daKVU80DJotcN9HeA+O
zSzaDIb^vOeYl1{Y$+>VC^i$A1fvlyZTgXRM;b;?eye)9W^2y2w#HT0#1sezBh;wub
zgCUM7RYKu6iG9i~MZRBONG!6DN2FNXuYC}{`r<PDrJ3T+L0(&b1o)K#S)cm5DawkF
zj3(~b5OYS42*Aqn-k+GkDaYecaba0mjYVXCL!?Orm-rTnD$TRcs`?|%A*9<_wc!w*
ztu@rg1n27q+yvt5(nX7?OQQ43>)KRoJ8*S$5z&b#Z9=M+atN1(h>i4mx|KhQ(9Dkr
z^A4SkEUAc}_>talEy=6Mp9JF@B>4E~6{SpnSXjH<`X;gridqa~JVfOYf(m;)b(wFW
zKJ#*v*lH=Q;tZ4tAX1KDCd$YIZK3$&ysQPJs;LqT5ghuX1Y_vz%UHp@)Js$nS!v~z
z{~!{D=gGNN7F!9fk`nN}si3$*tKO$_)f3NUBzV_fwKw3-?rTtc0aekk62tKzBx!>1
z8v8WaiK6zIa8d*3=X6}n!#&h7k&}B3!Y-|y#VPU$w}pGT%u{nQ-F8nj<UN%1UXEv4
zT}eoZm2TYeOk{)@a!D5lFV+v=%^ax6zi@>3@y@%|$<E3zv}%*oeUeQy2?jGW-L;dT
z-wkE|(8)>~PM78sk~cKn(f;_SKYXsSPJjIDi)#Xm3C^u@{dxrWNH|??Vm&6&jM;1`
z1IUKMu`~n_DY0yv4iWLfLpV&0cJNe|?h7{tuM-=VC}>rfVGU;m4JR8;>>>ULjgR=b
z(J6>?TZjJq+{Vzd6=#^Bot1<Eyb*5aT{9B=4{(y6b=!|wh6DA*-i)5IpfaMgcJYbm
zwKM(NO}*FFZaQ{>j?gl?ZN=vR>|t&&Aq#VWWW)`L_KKacO;9v3kzy_&bG*B>KGWdO
z9te}G?X|&>Mk0uK&TU8Q=a0E5QySqPtNtRHG3+kQ)gg%_X=iPI0E&C}ncxmkTk~??
zY*=oBLor3M*07&VD{1ER!q;r)23v_BQjEK1t<?5S=6ey5V6ec$4S6Aa53Pt5N<B6k
ziY!9kZM5>)H0CtX1qmQ~rakN2MF6zyFidHhVsV0DmpLr0c1GkH6babTnw<oJ-oWn!
z{}hpNw&}0P=ut!;5(eD_I5#T&lu%iFPr%dVBYq;JjIDMGe#ZlEw=HJ`Oh=<}I=Y6I
zq~`=D;}xc2gTEs8Y6?#U{>74#{2&I*F4&!+=_$={jBL(~xo2>5u;c0QD87_VhpMMT
z%%$1FoYm$`*V)sNHQ2%$K(gzo@Od7BI|P(hzd}(gIxF44%`rfdH@E3c$vh))k;*5V
z`?5}C4Lzlylg$||6u}m)05@L1zeV1N6Hs|s$>!RWfP8Pa;?S~VY3G^6tyT$V{bBff
zI9)77S)03jG^l+nZeLD5)q+X8%$Hj%eZ2kT@}Ts~)+==6_D7j-_hFfRDUE(CmZWp1
z23Hf`^u8z<Nnfi}!E%i9cJc3RL3%9;WLj3R<O4__@V+cuSw%#9vw`?p#I2qesOD`}
zxjzYG2y!fZmW?JwT}!XPWs|E3OxrzQqb3e7R7nXObf{93*I5}V_g2ktXSU_qiQB=k
z1=4M(AA2O0Ro<!*R1fid<)B>}N9RTE&!AXbgM?qT6^ESXb!6+#P#I0F72Yli!>mo>
zZ<C0EZ#I_bls@$ler+tn*$BEfDMW4HzN;K;hh2!b2!6(co#Ft#+PRV+UnC5@CCpw$
zfPGSc?L`T(`|EmgC;vz?g}V@ghYj0^&JXirhK)!q#)Z>IFBZh(csf!2nx)_D(_g=`
zOyj0AfS5bkgs+F|TYZ|T6`a?@g|rWphqq#{$r>{IlnyRa=vKXgO@W6-E0H7d;#Dl(
zko8sDfdeqPi74KW1;qns3|RmS&t>EfkB2}Ho^3xko}|QU9?Zb6Z;pUPiuFK&X^IKj
z8Ud>>QA_?Aa!+eQKK=!Kq@!f5)Q%DYAaITHIvDUfc`co1W_qSxPN$i>Y<LL1wBt3K
z-I#=Xvt9-}KF=KK`m$qZxjoD5d{tvUJdRR9NdB2%P0Jj7UT3sr`BHzRC6r;kpo#LO
z&qP+hppQ3wBu4f4*v-M4Ine?RrHi6I+kwNQM+&x!l17+Apn7QxVbuVrKaRjZ4*@`M
zhi$vvMdW}<3iY(fFPM_0P}!ylY%z(H>qC)oz>BRmS5@VDU7NJ43H-e%ORm;}yV{Gz
z&z6q~6;8#+1W5Vt032XmzyWN6TB)0qC~$la8QM@^mpIoipsVlEqKBiI_t5xw9nY}G
z3+dfK`gh|{{>P6U2<wdl1EU=8(eaGp-4#)?>B`#|_uoC)<KeQY2N&z>XW!kUSlR!+
zy7z8l`fP=-((mrfTnvuk6gZzBmEp8lEbu3LXGbGEpJ8hHY<>RX&Nz%XImd>U5jS=K
zZx1K^2LOBj|4n-EAl}rCWKI<pPFYS?cfL~X2F%^Oo-zv&;cu_!R|TCdr>ec!zHUW?
z_<fz6ekWt66KSj9M#bqvdOMDuQ-<3`?^xAnMdw`l3Z3&tMtL?sSJNtdt=iA+adRy-
z=;9<7@XU(P+yc`|YyVBZ$4)1B%2iq52gz?al(FO^j5r>NXk>+=kwXxml)^1O0h+6&
z8i-?o9s7=DrR}lQPF5b{_Y$^kTiP*FuEDQ<6cCR6tiI^Dc^g<b7#FH9CQa=YgkxPZ
za;8DYdk+!OKN<NvabNVPRPMiEHqe#w29RF@ycJ1TS;cm3akr3s0OrF|M2}NSIwAd^
zS^(8yQmqpe+K0<XW!o|WA7W3IGMc{zQ2uc3bYv7mRzF3C?EO)uy9nGDIZr|af1=~_
z!~5EY2cy4iKfOjI9)d7)6d}2b0h}U&?n}+J%3{Im!ug+-)xlP&T`bZxdf0RLc~%o8
zU{?_{Z}jA2LFV8hgZu?#r7u9jdu6i|Q2v>oANYALH2|sS9377Uj{gTfxBkaA8Wzqy
z#y8spe+#(w6)~U{fN%KY_P2cwLYy=o#C_yXxvq)ARY%etiVbx0H0N1Kjb_fusxPA%
zQk^@(+Y4ydZ(+LoE6=|OSo@HutOjm_Si-@#XS>U1W4n>j?6(J{cgt%bo&2UWF7_PA
zwtCET0aE8lw2oOG{R8kVX=if<=6;K1OjW$D6<b-0z=DSNKkK&?z^>_iDfHlA1dk;h
zK&sVEjTk_7JAQ6Ei=fD}524en?D4S8`HhF5xRGs#cmVc4;>k5zO1*sU>&#rzapu<#
z1`0j`GgQn;-atdDzNQFS(<3%e$ZLNVyTMifQU4AeGRM$vvG0H2k6A)kVuqy%QwNMP
z3_*&ATF-0!cwXiJ`v|MP11VN^K3iXO&7`hcYna<|2uO>4umu9h%ydSO6e(@Julc^%
zd4E+<(!LZBG>8vO39NbJ<fmaY3|oNG2tTv!Lxdc&P9w-`o<i6dsmax)06Ja65v!sI
z@eHBP!r&|mLbG5(d*C&U5UhX94bm8(G*~MHqHJJ(>Owe1&C%v<G;nyWQ_3CmKx+DN
zhgXaN+i3Tsyc7milMVce%wR<r`b*ca9LRXbXUI3z3_c5Rs=1VhM=uc_QENreSz6K=
z--I4u?Eq=ce1g;3F_GVlra69;Z!KlA^A;x0PM?k}3+=+@1l#W}Dr;x!^QS|zXLoMD
zd;ASR$itJTLkp+FLjW*)*18(US>m%RA#kxkdIR)hO3SUGupzRJ0XlAii?{K|9Z`vC
z{A6>tp*jB)te;$I={qO_$Codke|%W^I9dy!#E1otmM@<aKNi3LW$TIdVQ2Jm^bvg7
zs|l2t91rCZ$VEg%rv&`EA<7U_ZQ?jsZU8=&<)3OopQ0#a6~m~>9-Wz^yA}Li<2|9#
zZnVygFmDInDF)07)fJ|2DH@WCP^g2&(OO!dYLKTaIL+HF)}SEgdC9+c;om0!-!}Mb
z%BXEoV4+p?%oeDqRj5<PiOf&YbF28|688e|E&VP;7GWGtsxAsRHjFpj&Pj~Hi!P#r
zSVb^(0)5AJoM>oU!}Mz|ujs+FRU|~0C4fsTXdgkEJD|!Ktj{A&c{Y{Zr(0}`$;}V`
z6<t==hSunqWN5!L+i30>L379V3QE~3?*#II(N}wpOj9hti%`+=DD#E4FkQKZ#hUl9
zzH<#hB&ZIyE1JG~b98*~_&#{itZ@Snt5T{LT<O1(2Tc$vaH4yq0j48hDoijP6`u0~
zR#k>c1C(a1$s_jzle|<}Lq%wh^c;?#A`cq2l8mHaM#Gj3^y<sa0BN|>ugsS&a!NB*
zgMNl(&N`u*7z02uGi!7kH;n-483Z1S0>~Uj>N8ca_DWU<K+bLavLpD@<0rM<R$arR
z5UP-8i=YIbr?uldkaUkUe}X=#{xqM*$1`tsA!5PjG3ZFjiQE8ts^gLG$EMB?0DvSj
z>SBHIe0}y^=>@@)=kuc!zK8=kHVpm#Z!a&#Rwy#kdEDQcPVFXhsX{g)o+K3=GiEol
zrrCn3*w4!9xrCL}Bhq7KKgW*T$HT7le9pyEdNDfo>25M2Q#$~;Iz}?#=FN0E3@_`@
zJ8#F^t<fFtG|il2C|^`IUGsWR0`LUojt=v}B9hR30-(K4(#@$bvVG#8X71L%B~rSP
zRTm!3=3V7;v<x?+cN>}iiFLHBw_}+-aV{<gQ2$GZqF(ugon!uS<fqej1nlutMwZ=z
zWf{s~b|2LFzM<!PV|PrXEjt@BonoNUS$^G|LN}FWZ$ha5K#F0(>#X_Pw+eILFdYr|
z3O1svo0ijh6wSS%yPBRixI$T`Ut=qLs!QuGxLQ{4*DTxBx($upTGvQvrlmQM<xcg{
zGp5D1CH;}UR`HJC<Rv2)?}o|QohcaX`G{Hwqt!5dW>3W2jg}*XJ|Qa8$L{HY3Lj3v
zgmo=AWB!7_X<rnuoTHV}KF_n8oOI1wrrVZ$FM$gQOKb?K+yxB_cmg{s080cL^DeTA
z>pIp}h#>7dCgSFMnn)F(0-p;!f?gDX_I1_ac~JjX+XL2}*R7<HsV}~sb9zC64+=Bu
z$&PZnzpuZ_+X=c)1#zEx@jfyCrBgaW;5PUBZgNWRT;TvaG5?UH@i0aWk?fo|Q|fJ1
zoheBD5KGNNg-oJ54V_dM0zu}xC)F~KVnd*K1%t}vNJ6^bVtMc*it+AYbQr=hUcmRR
zsBc-t(8&shSjruwP9cW-yBxbeNnEBVPKytdX={f6{PhTx4CpF;Qjq=eCb)F{o^;H6
zAhkJ=pxej)${0Ysb2?NxpI$hhUV1mU+Bx;%HmbM{Qjteu$?%HvjRDcSqBLU_x2{NG
zx=u+Em*1B^PD7Cbjr|WH%89*yxg2?PIb6FOebzZW;vrTX7?gyQy^!dnh!INS$OMvB
ziatr)ZTu@sCLRf~wzle{6yR%|ON2KJ(RA8wAaX2<8U!SyOX7#1=G-}pVhrodA5koA
z48v<f5D0^y*!s!ZU?9SFS5z0I2-*s3-Z_tA1&hm<k(e+@Q7@<=9|(5c#RgRjcCq&D
zM`d^D_hun)YbKlgdQ#WAO^jbfDJpMUHw#d6kFP{x`qL8BImhBL1d|{M_;?aZ3~*<g
zdFR_eiSLwM@n>e!*m=icj~YgRa^B);v|T$X&_kKy6jRxh6$2>ZQ^PO|NrTno58?5g
zyJBnApRb{bw2fy`>(}4LGWCNpOMEX7rU^|Pzl(vGDXdF?io6h!DR)fQV3+-P1~2<e
z<3%5WOpO`M%e{s#N5M!+6d~P-$7f?t&z5K3zT7$c?jBpr-o98pzuz_uNfd-TTwz@=
zUM{@<X_~s8Pyaw)TPy8qJ%vmrkt^hLw!>F*W}3p{+=ccJv-pU-DXkz4-6Z9ljy*3k
zV0BoSJxfu|AYUQ?{N1IxOHI8ySX^T=S%Ap%GI5{+qpnID<`6OrLJw@!E9<n*%d$g@
z!5DcU!3(kyxF9X9sg~EFqm9=7+*iWR9dcd*73SByO7zO7%x%eks<XyY_m+9f$Pd&P
zGf?TE*Kh3}Xx)0IzSgTj<UR9xk^NqHeaIfYpQk{K+7dY@szvXaOR4?U&SVDqjAU<u
z-Y`B>*<miGYo?6`o7tgEUd0@eFD+Oca`i38WN$Oq2F`dYxR5Xd+jjH1quppo0?zr(
zZq*_OII`aa2e>9vuefN8vyH&3vnlT+urFqvD`%8Y`THuSeNfiOQsSh#j~N}zNFQpf
zP!ML^cLl8br>JXPq6Cqarv8=FL>@@8?XW;%m&pEK!7nR_wC)JAZcqy0hWQU&o&z!F
z*UfF|O<Xl$#L`U5F|!5A&d6ZR03*YE#FkEoo^g#lo92O3lT4#?qL@(frKR(k?D;E@
zU*dwhqNOvY<|OT$sTnEQf8NA5Bvd!Z`CS^t#FDeI%K6{c0eaXtJUc20<?T?PX95f6
zZ=DW3V$I&ZoRKb2?8FjLy^R+HV2;lcC4H3eO9|6G$|Cz?MQi6vbu~Iu<Jh%E)$^M@
zs|x#YZHXjvqnA&pdO))y1~myjOqM?^?`hH{<M|^W9>!782Cqyef-Ir8#V*_P+vtGp
zaM!K9wsufIJF@S$?$1e4^99={g~0gANg;b8In7&ia6y3FjRdY2Ra|bE$2dzV91SZD
zbf53y4(2%a<zb7)j#tb5nB^>d@H-y}Wo@1dGAt55NYIQe_~3R{ky<~AT?e(!ZfjS-
zZygJ!jZ|c*fDngP!vTgNg+$cpzs~%SaZCg8vOlWm4fD(n&sC=1dCo{zz*pXvB>)<{
z<|6smB^<XLho1kr-fObAIQi54%)d4*X46<+!U%b+$^M&`%3{u)?=Q0ua3}xUvsKV`
z$#SVT$4py#3f?HpgaF&6CrE_I`Mq`3^pFyAcemagGH-H(eSU%2Acq`3zt_U42iC82
z`UO{b4=MONytuw<YRJD^kPIS20hA#EsxS4jmD02I<xCgP5J~0rV3a@*GnBJLzN3T;
zDPQ-T{Y^BLx%X)GS}@+gOX3ehM{L{GGQ8kzrI5mg2ud6^=;&Qjj>vZ>wIUJOheZ}h
z`J{G~@VM;|eV9C{l`uc!P=p}Xsb5GGgU2trLIN11V9v&`VTK5#dNc+!t}P#AJ{ai0
ziAZ4(A8B1gIke+DZ0jIrutP0<K2djJesdj&gX0G=3h9qk&X#BW0g|^bm(LzdoQ>_A
zt&hBYv3mA!D1QFO+4{YA#pmbKmG^_w%<2jRQ*dWMHuvq1!Yi`<-$Q~(Fwuj<TuD8X
zjFTtAyR!$s?+V?GtZuSJHJ^@{K@d7fI<Mz$n8_X!jQcB$V`g*ADU68lhmMeqtOO9+
zy)SP@J_BDag>M9?mv)u@H}zY(%5@AM#ZEIfN4J!7CH!N+t$^1l{Fca?mYFpNu69(p
z{(==z;5~eRypgl>Zlb$jW>Qx>ls>c9OymmP?0LO$RSrJ$G?5HrC7aOd&%K|bQt+cj
zTV<T?sfe1bLi7tmt5@URcL?-#*l{eP{cpF+X0Dh6wE9~G?N<GO*;;LR>=3+@w{##b
zvEE9VBr+M1U=P-;e~NViu0;XQ>Kds%s&nh5=-G;J4|lH4u!qy`bk>p`gs~aJafuex
znPmSh23~%|SjuB$*zYSa?5RX!O^6CsFnB^=DSon#5subDkB%Vo=(7(K%aJ+vA~+D>
ztw<TY%5^a82?gH$KlZMzsf{aJ|CN55hZ#N0Or0O#*qJ0%_za0BC#myts*xIK5Nbsi
z2s~lH2n;sPi7^gwf{D4v2n!)D76ju5l6U(bsD^NH!2jXwwbtH!Q>(jo%O{glb*fUt
zQt!3<qTStVecxK&!VY$pDxXYM{j5!ZenZ5}W{bKq_L)0$g~erxs)MOulB27juG}p2
zd@Jr^Im42WpNGhGMWUpBU@)Qfq>P`xjr$5`cvsaYQ0-Jpb^9(}zsiCUbht4|o1AfQ
zx2xEGt%gMd7*>L86jYQ4(ZlU{Yp@6~bn(3zy_tM2H7PS$U7>OYfg35AZTJOVL5%O3
z=to8^#Mb<>-$T^kKVVmM1cQ_VE2(_~MZUK23U9M;XD$SLdpf*&KKdeb;r;3Cg!gdo
zlKX_NcBPCg)Y$VD<5+laq2aNl)~_Y)k8^khu_&TXp0Ax3s^Qe&2IRsWT<VMv9w!fg
z6w2pC#Wl4C0zLU935u86Q_l)1dXunQ5-(|!;}e1aVrHZe?JVvSdVM>(nExi_{WZ_)
zMqH#tL^fW!UFW3(houlqi`uT(O5hb+F|Qbmec~iR%?oDh1^&M*7V!U<Dhz?_bSL>U
zAgA|&-azi?+0Ta{pwHb)_DD#n%F%wt=uhb)my&I#cB9ZtJ(3s!%I8DV^lP+t5+JZ#
zRsN9CQi_`I<W1INBv_EfW5Bd#d^oi%QNVkc(u)%g=V*&vjkuu<if3JeBg{@8NO&Zs
zReV^+^`k8rwFI|@f%z+Zk%u~S&1%T5<rGdkb|m2ow|@;i+Tmc>|3LA;_VAY1IdFGa
zcM#08=#y8t0)o|Xy&MxJOQeY4?a!CRs14HKw<{zRalb2k2W!RVdIv4@*X~~I4*L0~
zEtKG-keQ>nWcF?^C_WL#tjQvkt?Wj0SGi>oS*aEDU5oEp5y26~IPPv}{BA90J*6b=
zF%GieqF1~@%i2bXx1g6GTnsj+E)GC<g-f(|-bz?gl5lG!jTk=!9MV$9c)@0aR{z}b
z=-TnfCh90WKOS90t%ddD(QiPgAA}P;BD0g(nKz5m$0Psk&{`1TT0{YVpQi9`di$1;
zT-c$FWun31FEZu+;ozRtnjTgCQPJTgg@#C|i#<Uf0p!ljmh=^yyuU;AVa;kRkDPlM
zCs{Nnc{fzMXflub{SUU^-k-i$S-(6OzWi?c^43G;a^m6T68pgZ#lzM0+U4U%DC)2}
zb}?0aCpr#cTEx5ZrGZkPs&CJEjdRN<1c%N`B5pox6wyZ&2%Z=~-2EEvYayJLgU4Z*
zhu*S~gGVQN&cELg8|k4p%TtDyiZT1c)=VDq2>owiX81fxI1CfHZ-3N!%g=b~`nSOK
zyZh2Rh<Q_#pA`A$VgM}AVEA^}Y*!x^lPL2EIMd%i*xQ68{>4HOH6g&iP%oAp1phtc
z^sDu!!-$d;5G7&1I^+s$){Cn|{;%NnaZu+E0jw0uXiGaCM-hrcGJU6N67(Ju>Jj`!
z29o|mcBtm3Ti`CI4ff7ro+5vv<`vU(?$O_Y$V8KOx&!xtPu3eXb@a^Pt>*N8qh$$-
z?N_x|l~-F93WTEMj)v${s7Db3Ktd@11(hsFgM8;*WFt6)Dy%q1QTw^~kxHoi`YGw7
zW&U%;L<K_^yX?%fK3Z-~xB?X=ChWxw^Yx7>QIilPQL%<7*i(Um9TE^0!{J=*LJ0wv
zRYk)9+df4SglFe~cPMpHLtu?+C>9V%^D3aYMamX7#S4JqV$P<Ikffmapmk-dR>LNh
z@w?Hx*na)Me#<yCFZgY;iAfO$9%!NC)`B|?MKYWPAv_I8c|8n5|IxA{&NptKK6rMv
z_2b!(3sCh@oI2T`Ion@7nJ5t&`DU*C=7)zZX=6YefWz^#k$u?B&L%VfC`__1VP^+3
zC!0%W+w&aDMBn?(aP@p~`n<S*`r!HRBsan)p3#x)&1JB}RJK$6U>3v>BohD<5o~lL
zXUM91=mzRg$Y)5XIV}|*l;5Fc7|Z2ltGW&L#O~CSvS}gduq_oV3oX+rYhny&nb6n6
z$^ht2uor$`guH=tr?i#0ez6{1S`8!}p@Bj+(z#{3{`y}Cn@M*t$fvYKtcj^)_Czw4
z$)|F9pI7=sQn`^&L)J%7<^+xv)IdcD^7~Sazz5jPR>GPgJRZRk(BgI@!A=KVUI1v5
zLHP!srgB;)n>#wl4dqi^g!Bg4VT)4(u9_A&7Le2Sj1FBMQ%byB&2$CZ0J=%1zc!B0
zA>fQ*(h4}XS2Ynm`n=2{#-M}(z6{V44>~@oU+3e~0fPbG!jln=B)_Aoji9svx&YO6
z#B@eb)YDhv9UvIAxm+E-{Ptd`EP#R?nOGA@10fnW>)TI)dH%@P9Aa`hMTlqjAf$y*
z83O1a`ku@vmV{7qIYgcI-#f68n~J|}m;!C~`{MdOdK|bMFy30gSZN!j&32*fS)duV
zH_ILECMe(u8ZK1sA#$jg5yCiz3l);Y3*yb`!hzSlg3k2{<_yac<YCT@`=+?V)B78z
z)4n_5UZy#EX1ggFj$?FqW_7C2zBp%a(H#ZS@6?R0Q7`q*d@4@w5<NE<rB?Znci0x(
zauvTS@Ww3g1}ISFue_v7gYZ0m)Q_8tWPN26dCyT%>-snv5iL$)#q%VPH>LLAA#_|C
z<ktC77sNM0(tV|b)7Bz$!vA-U8eX`c7(?9|6>58X)S<y%dcxrAx8tX8e?Ejl-rkhn
zZS<!G6&2QHZ=A17SuPePj@^iNr}SJdn*;Ug6uqz2WU@=^)1d74Jr)<~P_y5wkz)Mc
z*bb3h9MITHSXLqaBkNA%??uA2I%qdqT*Xe5d8or2#G8)JO}?HN=QcV!GYE^>Z*e4C
zfm7u>?LI1kDm>ozxMNPab{cfc<F!3{8i`Il6y#-*xKHMRRPl2GD|YV)#rFzU?|vOj
zxIV`Tml}9c?%ou{o)(ZlE#c&~C`2h?<7MJ0&C1*=elFCP)`dt*fiQK8N}VO<QsD8s
z2)I{G%)No{biDCzae5YbgA$IKm8L-HhlU3?UKBNH{?NF*G{>`C&1w9Hmn1pGe&Wc+
z_Y&W!;afvvZw*?-I_+(U?@YR8_m|r6`NaC!{1`MGAAO1yx)0zVKw^%aK3zB;eQ|uV
z#ymkfR~)c~n(+t7W3D36xfg+*tRbaG#2#^fKs{Emm-o78EnX3Azuo=6`Fb?VemNaE
zE9PBAnlc(M`0Lir+wmRc;=6;(@3t>$(-#Ztzzae@@LJ{K_VVT2J>_D07&t%jU%#up
z9jm;(`w)1;+Y(P>lK?!dZ?EEC%3j@Yk!RKV&P08CT~KJ7oP6^x#~leK{emibJvc;%
z<6RJ5_oi+766(xv2sX2|Ufdxc4xDX!WcA)I;r%^JbYFe>mUs{>;6NuoM@xto@9vNi
zfHX;Ga$Cr=As1E^+}FC`=nBNiIS9NG*2xxaR35|<Vqu$wx5VecglyO-@NMv=oO?yI
z**LLfRphS<_2N3}N)<?5%H+c=GA)1^L-vbDHU&&SUck{MoKwy%fq;jecUn(&Q0e*U
z8_O|C7|K94eYBNKLAPu19`99ja!q>R$w%avJoRPw77RacK|Iw>XF<Az_8}xaW9#ZM
zkVJodlXfSN`<Je0tfn+8wY4a%8a3>lq?*P}Hf$4B*$)xI-<Kq_;BdM!(1<z<{+g20
zlS!ptO(pd%)ppr|yXxdF8oY9}T!ik_L{DKUYY{W7tBswY{hCtS3vW%*s%gt<5rR_d
zXq=;f789au*S+s36co`qm3rj~HJF~LSJp)pZ3W9@t4#&dOUM<?;_&C9*0zcx7Vs}L
zZ{s%`FQl~u-LVR{fW*{zw2sl^?zPF5mROyK)F*n&Tz<6JSb)8W(&>D$@t)7-nuRU4
zHOpnxfog>nTIGvat~Y>?R3Dz_OwRty$^O?MfPNeNl+)o==E%^2pUqCc`R<2Es_HoV
zN#O<XMEfOkc5~`qew^}~`nTqW2+flv7?QELk&&h{!0u{hFrACtxY4GmgHh~1zup$R
zmhVeTvp%Nv2N|P3(xHR#KL&JZ)oV1ql1|6sX*JcSr{sH9;|$kWC=k0Ey13cP`nsCZ
zrAZ#s{2#iPj<tkw>~GlP3_rK48TrCq=WK08Tn)>oHJbze{3lk#MVaNjYq1+2x9Rd~
z@o3&{^l1rQy6hHh+pb+{k7Rh=7-BDGwaEn1MALxQ;Yl(<_0DIoEucJibrVsjb&uYZ
zYc^=nmk2PSw676;JP<QBHcSjcapQvb0yJyD|L_Gsg@{x(=wvl>X7CRK2Al8@M}mS5
zh0vTG#8TE9VV7VcX$N7Fbq<a=FpnTwh>lYW{IyM1C<Y^ylY3ts-~39VSM#ih&+P$x
zD0+x?cscJYQB9%r4#uGC%`#9667`_`l8RPdFKsbnU@7RcO$_<goq3Q!rWB>^2<@VE
zE5mRN1FOJ>$rLUn+4B7Cvk-()=;fRd?4}4-s?2fQjA2U9bNow<fA{?hHOcu$1zub4
zB<`gLl1b=h@1TpjP6=MjUA~QeG&<Em6M&;@KMCfbzA(G|>_Dzm2zu!3#aC|@cbI<|
zhEL*oNT&}D&UUM3rEi1>*_0}JPtEFaax&Qi!(JhwWQn|$=7q}j)oHU7jRz3oksVkP
z=}a7m#*VDns@Im0<gguy^MN0}if&~ON1!Q@CbAreJ?UWRXt6h`CV&uOm)+>MY5FVu
z$dSZ}NsweeKFH)#iKN!0r4pLrm9+r%X?P*n{imh|*f_RHlJ5{%4Yoot)R=??q@@&V
zNlB6$t^|n0AMC3UCZ~surhvYSG;vWzUD7S6;DUcF33kDH1vV`PbEH^ElMd_nRIgE4
zfCo3=Q={d~6p#e-&EyMv&5`Xo>I&)P$YA$ojt0~gQMJ+N@yG_w+O!nOmd_EnuBePc
zne3hZ4F4g1J@KyhC@6B|gvg@twaUd7$0IJk-BT`SA2$^0@B|V$-2Ij)W1-rcWdz`h
z9&oqtDjYhMel<9Dr=uZ7MH&ltu2~LRn;zeCwOO%CYpb>ix=p)xI5^Ha6tkFwicXdR
z#+Jvb%YZk77xV)G)|L~bxLhRfk`H&bjl7<t0a>sE#(=6AA`{1~u>c248Q?H=UiRJK
z%GY7f&?{!qgpxpSj4tzgM~m#~ZAb#mUx8PKRSp#|V7VgPH?R5!*;6sQD=auP3K3i7
zOT}{d*8B<%+?heq5!gcx!g93K;WB#H04SJMG*z!G+Vlt)BBbTpE<Gg9yK|6u{{11L
z_xLNgln4<b1#tAP>@Oh{2N(8FAFXo?6rI(R{re_bN{LO8v<7ir3tmNs)}B&NSx~<l
zRP>`hw&l9orN?_?;So|7tQrOLb59W-W_4R$ZNKvG(JkBCKD`mm8bGn@gEu~Ii*7E1
zsk!ajK5LJ)XN;T?H<GdLTrM5)!V8@2>XjBzvhH*PK=xsK=p<}5G(rxxWhI${Ep6D4
zf&F27AZYesd(0WmgiXChupwX%p6S>ACYXH<XD*q14T%6VSmnEB7gdOug6)5djvydl
zEIRk>e58`E;ru0YubD~s)NAl#@7A~H(Un~z74`@T*m#`sdDmbipLPvaQM0c7e<GI;
z7cjb6zO}y<lJe|c%R<U^5U_^lDw$jjChsJ-+W$O6YBk$k_{?fP*e|h~4ROO{olwXO
zy*DL+GQ1Qaaj`jpWfkkI!*VIk^mzWdzPccXc$I8v#AQAKB>}_tv(m8Kzx+7xt&=gb
zxu3t$5=YMjUE{{yaA8R$9JAYng-<cNh*RuuVo}H;YuD9G+;9mn!AAsbK5UoF7B&yU
z3rdqYP3vVB4lFC#l7P!7ctqo5(q7?(Ne^y|Xz~NgO19kHGN#tiqh2=E_DWxej_nP2
zPaJ0(9r+&uUc%N7fdErLtiK7he`o~cWPT0ts{YIma~qvS1)u}a3L4$OO&ij%8@=G0
zLu@6UGP_kkfbPyeho^Cy$cx>Q$qF*v1HjuFg>l~AAG>%waryn&<+sysZ^}#FczY8L
zm~C7jZB?hzOrgI0oLmmV=1QV%jG22n;>>zAEoGBX-3z{{sLVTB@TQ;R53ZGLIi+P7
z%Yim|Ar*3u!hvZekt<q80h9`CmW9-A<TCl*Tt4Fl(&X-8?s>frI%GJRLv#f2GwGYy
zQ+~vY-Smgfq)r2zJND4%;4&cWKaj4~a+1?nu3rHpK>%jku&Huv`QdG|wyepriO^Ax
z5lQ%sT*+`yTZtz{LdqgYb-;tl8Z;Xreerwvh49yS(ExB-*qYphwK=V@o*XFJ;`r(J
zyHQXyn9Tz^^PlfGPbln`7A)va$GQ>`jM9ma-4C2rnkt<;$jsQUA)Ro=%=A80KI4wr
zB@@x?`UbOKL^@C-(IKB%h?g^-jP+@K%*E}N&x3;rx*{HNMvEJXNW07$(Jn4q1DUkP
zRgwY(lWGpMQDX^Ehi$+H`Bvo`Y&FuLs}<biLPT5;SS?_j#p^>^&^2kw^J2Ia1_rcE
zXa;pS?TR(+IuGw)+inm-_#a0v`i&k%ocdrOM92o>NOtxEdb~Fo0wlbqWz|f^NG37p
zf?cU7K^p`XUORe<S2_)HzBx41aMxiQLsT&~TEI|gN`yyc46r~omVzta%qDx2h-!U#
z0=vmGpKwz6fn`%Okct<5XG5s4Urz#BsHAmvC4(;nHGP=F%DqwG_AQOk$8R1?oqe+(
zoT|4bv^x(;{u3jJl1=)vOnowUuoRCf?R-}wL=g}=d|!GybD;cMd2~^E6t>qxRmf4A
zs+c9@+eZ}94{{bO=ZfsBFrp9qr@7_%(=4LnzJs6l(7|5=KOVjuVFtmiAIMLqx~~N+
z?INeVY)U4Q5ToynW-{N*yPikS925ts3d?uFfOGwcF*Km}s3~0wcb9TE`Wmz}>Nk@8
zrcAHW9_r4M@R^|vTjll0>oNA#Fo^aACkYnNXB8zm=&f>ZcG4ypcW6r@gxa7mT^LBB
zjT5}&dzSFy2>Gnfu(*bKvZFR!^kpC@`j|PI&pE%lPvV8Vtjlrk{t1$T!`<6DnW>x{
z44=&SCfEvU7Kh_ae$W&w{X~NrWXMEIC<*J$q;zX)Ld%c>g{4P<ts7_*v~I9VeuI8R
zCZAH6dwZ2l@b?=^o1V&lDT%k*O%+qno>b@O@s(6X0sD>m>XhhZz7c@S+IuO8{^*TK
z>64h3z9LY~m$Y;DJ63@dLm0AXs$W==8BK*XqGU+AuEdODy8tBkKh=;09}=y#os{Tl
zQNbP;l$_7=GR>X@hr%atT@apT>AEI(OfzUQ;-`2aotb$JxWfETcQls{MR!u|*MGXx
zix_d%$ak<{1#!WC@=dJP0q|wSwTaE>`_i38cT!7f%KMi0l8t?cw+c#BI5aDU@)`R;
zY&391WOHh%KVh0DbQy5`)JgUYGtL?yiXoZvc~$Mu;#$U(WEF1HprdXTylc4Q)r8P>
zbtH&tDZ8nvXgyjypM#<v*pyUl9vJK<;spf<?fq0NceIux-3yl_2AqJ?f5XFuaP;V@
zXkB0q$TV~yuxV&8+bkO$@j$^Lbl!{5+8(+M%nL=09q)DlO~H5OP1>EvTGL5TQe&9t
z>p5Y*S6_RAnjIT%PgH@8V6}+5opNY;_=$f=BL2?HfvK_zvgK9Mv-Z;3Sme2i2!3MK
zJ#@hKqYLowh}T>WhuJ>GF1lru5Xl}^TmhsaRJ>oBRWMvIq^YKeBz4dxq&GBFd*R&$
zu-QOX`0H`DDD|$S4zeFeFc{Xh1SsIxGk|r@K`<vQo+kz(n~%DJAp~&zWWUsIAKzR(
zDKDHaPV<%mO&AJ_5U8Z*_-5(!Zt-Nfczkp2_~y#-%`s+31CB=@VstSKwr0}<mug6~
zq+;h<1L+P&CxLkpmuz6{QNzs*oeMlW2{@t^>_c!{sEO35odEt_@u*PZJF4zEdl2wL
zu{s*Kc5$eX368DcD6T`|?z_-j>_l3##C`!U=k8sOJyI^Gmfk*?ytw!9;`V*7tZ53!
zps?!l(ZR*yBjwlK>C5kz-_C*o-}rWJ>Eh0P<)Sj<mP3)_eC&r8LqQYH-xM(kd(Gs5
zMCA8QTMM{i#+S|=ZkP;F$Q@1saRnQeKq>`}cEcT&vd??SNi8~iDsX)kA3>L!7|_ZP
zRj%jsj`^I%rU>x)iP^gVwNmJ@JM5P`j$hX758)c=rYNEy&ORrpwLGRIs$H}wagio&
zw-Fp=EATCRSQO84*#9{8Re0G%`~~*(NRnf7UM@%~qFDVIby%3hnmypx6rZ@>Y=|)L
zkb7xteXu6fTg;63UBm;H+{l1}Z54*asOM~hLoUWKSd88B;0POAEVz%UsXmP~h3LGN
z#YrR*xqQsLkeZpS7oS+QjNTNZ*;8SKj2+mAWlT*NN<x@sm(Aiacn>Je;vzMBu=H&F
zgl{4=Z|M4Y<Y;T4S66gP9Yt4$)Fku1q2-XF<OYn)kOu7>EW$IN$;Z3ZK3zpMn*rVu
zK#%NTa~uakf}>S6*=?A88tkx+6$=>5d9g<cw!i@RQ%`H^Vy0!ZK1KTyql5$-n2H=_
z@iGOMTn#DOydHa1dA0ZIM`-Opx+BEwRZ*wDYF2N0BTXK(EH-{<z+rZx9(%4~)dN^%
zsmg5FI)@TUwd5s8<3-5|^_hw0Fjtc)|J2k%5zQGK>~65ej>sZxRT;;5sQ`bQAQrd?
z)fND?&DS>zP3dB;gCqJkc!P5aO+sQiu3if-1-wUd)@Z{#*UN#;a4>sFTrY}IRd11=
zCYnC-Wg?GpxgWwZWUF(V3PxVNc>TabS4Z?=5cdTvbJu+VofvHQg&l~jDeeN3X1HY3
z`$4pky<0rLbMNF)5gIS--zD&peN<c#$+OeR;q#aGSVUrH)X(O}P9GOembOo3ZkxJ3
z(c3-{7V5G0+b0Xp*{)%CHvS#@J^Z+T{)}U&=ZnQRkDr}S6if}E-widd=6VI|C|ME_
zSM`h$(KgYM&qg)Xvc1r40d@Z+Wc6)RbQ-A~3#vQyu87Cbnb9=(GNL()<vae;f^9si
zZHm3IV<Z(F(HM6eOBiwatKOCC{Y#7Xe~ozJy_t@Gi)hL&ccLS@?MGMNk8a$P*1BT3
ze5NDnZ3bO6Qc)K<Grn;>x@|U>i2w1A(e3;7Os+-i9<^VMKE>yM{&Ne)+LtXH{pX#n
z80*n(+1oGbg9-Kw%~`cf3lp?*+PQ30>pm^3#@G}ew6#3Jr!CJg5OIc7JfF#GQQxYm
zY*b4lt)(C_tp)RJHW|@6t@cH>Om#(ltbP%7p?#5y`UcBriEcF)OJ+NyO}{iUU9m6W
zVHV}BJH!@cvtyP@aWk^Hu8f9heXjI2+H&LLHszZ7KkQvwQyW>5{wwu1yAdWfqW?kn
z^mgyW_G~-0ZxQ>lQAlMdBUGA_1Orc&Fc%vb+ufEu#&~7SO<<5@VX%yC@gVyh{sA#G
z;BMppuqQLmshdiwItA{=8~b2fs&lJ4b?Q`RewklpR3I%;zF}f5usH@q!rCnbgj}?M
zd4!H<1oq8A{B=4G6ZY!vn|sk&eb*I{rNPnhQpb)FiG~|&&J7y_%~xRlU^l7QBL%v^
zSv8TYFxjmp`?`Xs3d3bVPl%%r2GYWyO-6-`+u?=>&=p6@y1Ouv$>xd{wigUEa!l;%
zobZ!tAiBtoA5gY*!ag@cMBO_3<M!YpnsqtH35AA|^%v~efo^j2R3N=~Tr9_McZY#H
z_SGDWj3XenBgk}~WAN?EgOQu;Z(#7P<F)`v5~-^>JN*nr=&T}JP8Bhqf=GNl&yE8F
zb?xroO;=AIJbiz)`mXkn*yfd!tF!NFSA+cOoR|1?4&!L7WYZ0VBAn#yfSA-BK{R$i
zO<;Vl`w7Fd?eK&-6wvWG8=hVv^mmabG<J~Fcuo0%!NHC$!B*Tr{_+M`hmpUWtj$#l
zcM0;oW9x8`t;240c!(`TH0_34h9ZvG@y36qCMoXlb2PJt>xbO&Ft)e`e?vCQcqYOT
zJ?x1hnc1(5W5EQsafb4^bM_7PHFUd!ziWcAdK&F?@Pm@5c6=Godgi>#-6zPnf(cNo
zoMt-Qos0Mm`vQ*&)uZY1;r35Qv(tyy=ML|6^lcRp9d`S-8{<cJ?j3GF{`uuLbm3|5
zv>$!j>)fUk_-Acj^r5+c9(igyZN$KpAg=I_w@i*0&sSIe%j@!b8@2q;gku`_MI&;H
zX><jnv&-NMw_GGIvvJ{-HjX}QR67s8WU>jOLflK6FFjAY6!jLY7IY)CQT+Tq&Wn$w
zt#2$`HF(y=xm4s@!74kQcr({r2hY3#J*{1Cu7fLHx0-0Kzn<=sNoWSb)8{TxtBZ@9
zE+GmX@!0=^>_q_v2DMMi$Pf{Np!Kf_eBJzIEf|zv10D~tcknM+4gA<td@b4TEa?d+
zgfe{LI|^nOz*mmOgSVdV1oAneyvr_(5O=}lP{xY#=z6w+Z7SHNhVF52RKUNp@(G~4
zR~AMbv|YL=XUp3D!V(rPTc-->Atu8y2!ZBb(Y~u^It$$j`e(7NIM%CWpvc;wW$7Tf
z+}bL$Dm083L$`*I$!;Pg+#cjgzubBB%a!?qkz2?hNv>ga$?$Z~<BT8%U+}s|)diJJ
z9`iZv0M(=jQ>kFZ0*1TKI(@8Uqukxs-B!NF>`zaxyn9$u-cOa@=NC?vW!winB6xH$
z{E6j5G}?i4vf+-HpY?bk$wtDrzN#VR`U^ZOll8i?zsr^*k?W0R-N**ZdoIgT41gb~
zy<%{|<yOOr^5{)7jp>+nM1rNF@T}pT70^j$7X5e{Q8+EVR~x8vTyQzB)G;h*4gW5q
zzsmx0VuGg&xzWgd+1qF>j*lK3>PpHO)L)MdgGWnB?b9`>+@xH)`P%c9<@!w=^&7~$
z^tE$pF3|R8WUSAg<d+t7A&?sOaxIzyc$JW*@M>?;r7I|VIdm04cM?F%Ujj&8-Q5Om
zaKPCmWBcI(`1Vc`ebV)Hhb&5vV5zscWvajrodWqqHt^rGq-3?X3mx1)D%XX-NvqpP
z4k-gxoK=e=ztSo%rjhuCNYe>;ZVZSy$J?lm+-HNkja=~Y+o)r*q;>qegar$V2nDNP
zmj&!{HnKxUkV@)#tI(kXiCJs^xN>jr20C=0`^Ph7_c?fL>^7Wap+E;pi;=P`?bag@
z8MvpkRoVUd#UglL+^-*wZoGX}`T51x;nZj(k^}cht!nYxiE%c}+g#;v@h%(Y+;~^m
zR`0Gfx6{gz4A||%Tkjq5>2~v&k<6HSNc6zBhg2dw>uI|VhBCcjT@TuHk%^t=5+K4t
zwz=6G?{4R45)A?AiY0W6;SUBv{>W>d1rSmYQGE>zv*W3s&jt%|_HCtK2r)tnDAI>F
zd|FT>c-k=NEkRUZr@%z0iw_Tp6gaH_>B}Nt@)=+w1>vhtj_^UT>7Z#^_xvFck_r$0
zfv}>DFeIg+aFPO#`~d|qPgDe_LrC7|o>K72$({Q9>m5U|qd_GFL0M3-6XR~41ytgE
z;5WjN5rl(E3XZ@ArVP!K&T|HsNkRD8Pk>i_sdJZXvj3gdXJq8Wu`NUgPG-;#eG?J_
zojGt)KxX_6A$qTz8ER4xzRD9k8R(Oa9iyN+1Fpq;xA8_l2zGK<esnbP?D%mdB6fo9
zx4iAcVkh*e{_U|7|Bv;@37<Ibh10{@xAT#JDChDPKzQPt-T8r1xO8wfP)e+wgof&G
z+Bi!j<&XAEo+}{5U-Uayh>DL8`5|$YwyWX;z$!x6{k>q7qrWX4Z_TvI#lJPg;v+$y
zQ*gy2n*VMni?0NKUV#=Lt>ByhE&gi7`9@qUjU|stOYy_wE<QTX2f|%kn{Ny3;+mf`
zgI#vF9op3I2)mGRO8Z@LmrWkr3o4)$!%K=_1C&ApP5uwSFh08I?}NlR6x_Ys-V~)>
z8{e%WMPZ#{!9Om5#?q)iI6!0R-sc~niH^*KXGykNUmW_9n?&RWMf{{Il^%*2sh&?{
zkqnGw)Vt!{@rVS=A{pDoo>O2jmm`o$XJQF<$-dZ+6a-`H$xJ)+1<HV$G92W>wfAKt
zBoxK~JkFm6YC&s34%sA$OL6{`;L5jpy3YZu1feOPo7q?d_(bndv4;!kY{CL2f1oC5
z-RRhaNR{uSR=dj7-N;Z#J0-P13`6&kIf#{X1RR_Sp{}GeYJVz_LiupIdN}b+9@s!`
zWMmQJR9^GZaEy}&^^@_dvVety0&B+!Kq#1CWlS(!Rl;yvMT0V&&m<abfyqpmUa-U|
ziPq*LqZIq4`^YiWAkLtOh(z4X_8Eg}Qt!BET2Y8U(W}y&H>6+(CO&MUppx&*5o~4L
zok2No21U!F#Xu7QZNMxXIYFpG@>(7_QgUa3GCbY8!h|9w7==I-j;{R#Z=eK*a5!;S
zwCIc)7)FA4jZ@;&0m;~vgb`B{bWSrBR;((f)^A|SDIFL^(Gf%vI1r?|i^9C@G$DIG
zLKJi%c#0MLn&h$8nh3{OU_JOZ_mA5k3Sa-~1w7U8VMbo#2JYo2Nc1~m(K6@2)D7P`
z;ut;!1krI~mRcqTV;@Gbx^r%paBh}b=Vr-!vp&%jL`<w8Mf9?`<MFmV5NC43hk{v5
z7hH;Kx~hW6dKMd&qW~TMKT%=el>cbXr8A~+nexMBnDC?|;g#-d&Lxt~byW4!?1!0b
z&aod!t=murhU2VMOEzEeVvx7kA-(8YtXC3V;TucyC3y3v6%@&|B=S9LdQ$6$%Td{Q
ziUH)&bc%f{E@>v(CaAv2V_GCJmw(etQ|*T@k05dbIZf}^O`vejCRHx#L2h5#=nQt&
z<&0~~g0$}$k~qxQ=xaqh%?XE?h*Zqkq{gpEg^wd%f~FW*UPyBeG0M({&4)}}W71kO
zCS(04(X)=?<swR(C8SKQC?d;86@PO9l$f1Iu`Z`$UfO@0$6CA-#UshOLS!uZ=Df3-
zA3HR$J}=|QvVaa!ZSD|-rXpCC7g4GzS*)66wCXMiPRxQrq?`(#%L@ulXxLhL-Y39@
z6LKo8BAuI^`8H6ABFCpBCiER!_h~($rSYw$^}${Zz9#qzhCm##WK){8T10@3W4+Ml
zS`sLNx0bk<RjwHT0>#t>cxmW_bH!SV)wER!UMLA49gM6a^z-;&<nckS%-NWkj*;Iv
z$X~_dN0ji{E@H>7aGg5WQi#$yS3Jn)u&b+B=W#>^=Kb1_sHQ9lcnFl2MTC8>5US)+
zaSAqLGSVZDV)aeLr6|Hjbkf>(zEd2Kr(aJ~7ya`!=muI;v1v4v>fPMz$%{2iYPnQ>
zf3*rA)XDvuzt&d7x$AWi1$6hOLOR;J#g{yJHt{7Z?`~ZEbyGeS=8qWOlOs;xNgGXa
zP$c^)cOoW;;Her{+CbO4Cpy@1_CXB8WgMc2HHxGlw$&V4;*NNS<Ia?tojQ<KJ+C36
zR>02YTS=THv9xykrM;`+M#*-_1=<c3vEAlzFN7VG2!_kZm|XPkh(6&?oEsToH|oJT
znzxKtlb4B1UlNi-lHW-rCEj7WF~Vw~94~ULZ2ijqIsj^Aqyn~;YF2=HHMA<@Sx#D9
z7Zvk_<oP#r1yWfo$=<qNzTvt+2}`Zq#L}o9F!%z?Z~+;8S0+ipM@$YPr*;sjD-!qb
ztVJtqx4}A)Wr?*c`ZwdypQnTl?%HI;#r>5${E(*(Hiuu|ix2B6pm4UGK^az=Ak}Zg
zUXPO0nA9#~v1<0|N+R2>49DSQj{zT}K@D=BveK+PF!%)ujvl#@j1~%$PqY2j8HebY
z^${Gg1%825S^i&d?kJm?)Utzc?p~ld-QKOT;wg55AVMVieFYS$(2atuzjbAA1Tmc#
z$g4_Kok3tw(b2smwD`Sd_GN~8n5@6W<dICe&(tqv5KP2`De%A)I60u2ngW>~Oy$z$
z=7MSRlhURHGWGmOyuQTnyMacZ4a(Q5xa~lMG76Xwd4b6$G)mSX_=_l@;_Q@g!&*ys
zSlAjiCeY2G(S1md3l4(fov3`mggEY37ZEKi5oL(dV#1oo;tHCq7Q{)aQ?ST<#}R~N
zn;$oNvGPf>aQS!WWh$}yYHL69kP)*>TE>f}22t?MqLa;+Fid=FEGt{PcLqvaZw;WC
z2)pwZwY@v6GyC<SHqO@vT3F-A1DuDmrK8!k!`UiX`Hp6{4}Y9`yEA)SniOY5C>qcN
z!x>=aeYSWw`YbvZFc}Enj&HEBk5-EGlN}sQ+&TJt<(J&U<Hr>`X~m}nkNR5!pRsI8
zo(v8eb*1{cdNqgzlWxwGG`w$l>epT)ouRf};V;~RO=CvFFlCbo_hgw;i5A<)4WnFZ
zZ%V%`+a{$-50gsvf06?5DhmFb?w1C<DJMY}XI<vT@w4coD;w7aWZ~rAR6>_d_KS_W
zdb0XiV9+8O)CYqONq#6{yzh)V+qr#1m~bUB1)s7v7kHfxoDv(nmM26Ek6wS{V_y8?
z+l!J2wiLgiBfZ&<g<6MaG*tPmwph8s@*sxAx!W0+BH`STc?~(*9ge^-ys082Qz2Ul
z(Z~oxDaXMe_3Hl0v{S>3LO6)}pCrGa1VHWLH3$h%gR1l$*N)P`TLCFK5{kwSB}<V;
zwdZ}<BILLR<&kU|CP9x7+7`+FG?6d{(@LgS<GF(n$c|+crpv3RL#<~xzDu|w;D6=)
zr2^%7EfTI6`ZI>vVxiWINK!kl@O1OMV#PuT3!=w0Oj$o}g=ppQ3Km!yLYk1@7q3em
zcMkoPQ}B1*dBqeGwu)`9IT2Skvgt-mXFYLc&v`W3lAOg${xf~fZU7{M@A{25jW-in
z0BViwn?`eeK+Ukj*VE1QjQV<7xvcgX78onPdNFIS+27piN$9a&2zKvRzQK3ng2GfV
z56G$vXO&CYWRK27LfOo_7d5kA&u9vmdUO~-g;y`Dno7**H`*Ypl8`Elp|RIFi|=={
z#5RlgAJiTl$6!C&0H@MmX-9N(K_rbWSV?Zn^k20t&bcG7(;}AfXt!0X31YH$txUJ8
zRDPfRCyk3Pv>5IeCuQMHN-SHp5*pemZEdi16Ycfv)Ci8bL#QfrAPIW@36d<QaP{!z
zGepaWtrSpBX#KQBIVIsuT0-u1%49<}M#?ZD$1@&lEtsY(n3;bDG!P?W2O|#;a#QUQ
zOP)*iXr=ge<Q3wR)0mBR;vhG6kb8`!jcB0drU+Quv@uH%K`R6hx-d;S5OUL*2)2a4
zV^yS@eeS^_$E`6aA#Dz`#V$Cq%vynZtlpR;KxrY9TvUc!ttXSSC)XF=kItSvc&fa+
zaqr}RZUGQ^gB3nd-rt=#nOTVrMM{8zjZQgJMH?Px=ZL~vzyqu57}a}D$y9kZ)Yg2g
zn=<xN0s$?3Y@X*sg%`T!1?T(~0opF;$2>Yyl&pP&>Q?;Ip;8vqf~N}Gu4FtwNYz+C
zG;bOXP}@+f%%eX3vff;#>l9(9h`gay&IN*h>1-8YIKRxLqd0O7<L?%zmnEz`>bRs_
z1pJPYc-9eAW7}`31UTAC)r+{f^dZXfIg6-W9-VrjPMO?$*-^=c<;svmw6tUuu^$bQ
z6jT<f1IguOLLbyszF`}KI+L;0*z0>Fw4@z+m^=<A#EHLT!O6suC@!n*#0k*aWP!FK
zWCUb7OaqVt2&)65ejRbQP0WP3ca@@+_QoI?Cq_hFp`3(!`MoIxk_&FL<8juOz`9L0
zs#ZM>s`gcL5j<~cc0ugv0bDa}^rH)#TaP>ib{33ml__l|W#^nnJ(HsJcw8PKSX8jW
zi%~gcU5#PgngG9w&OksKe1(Vg0Wju>1>`Y<<!C?{AWZ~^6;a*ogj=~4Uj>h9`%lI&
z{*D7_{#i7w%$#M+$>~J5N)ZsO2fI~3nf7i&9?O>qW{tR3B*097R8{xJ_Wnkz5TRWK
z0pl&UMxly#1)c_&8qK{e*AJ^>#~Uw>Ulf0SHT!dY+Y3p(+Bv>miULDH@1-Zu_2v`S
z^>KdNqv?O#DAOs?o=b+&uV!>vghDif#FA|LkRA1-hl`{z48nm1weA)KgEKsK#x8<u
z33i^qim_1LAyHspEiF`Es89#iLV}mTTEf5&B3=a@%R1WiP!S946gfo#Ya#tda4jQD
zEz6ibvHx@tJnh2e^$G0P2vtkizIiJ4MJBoh*}6k?%EhU`ie{(>oeA|!GNG0!eDi{9
zi5T!O6)}XnwW9M2@|l{}V+vJL*o&{IDW2gSJ@Zpw1(3E5m-m;aL<~?tbPmx%U@=@H
zEAgrbe%%oySu^`fC5u_BDuP<%$UIt&pbhM$?75bupBy2#YiLEbIlEdfZOgi}=XSI%
zSeqNt7S4-CJO!?D;L$$Zoa;-d@nmxyrrsE6uCVD(=zVHTQwEsUFg(N#=#nLEoL`#R
zWIUnuK;lm2I{?(vndZyPa5mY)BT#hFv+{%1ttJOqO;#vr<ow2f^?|{RDlVHCtD~iH
zAu*xcxws@Po2H)Dpo>_qYWAR6)3Gk0dw%hpD@xj=S;iQSjs4YiJXo++#_A@LYORq8
zvmuO~6%n9ZCG+MOMruH@v_+D$^C0dEF&L{Nba|1i2Af#UE?{94O^rE+X`+G-#&^jG
zQ5LoZXegUjgkEh!yzj&C;9L`{<HSl*UQt_&F(9I=XGmgI!JaWj8V!G()ru7^V?@K|
z!N>z1%Mdjrp^WCa+J0>v59|k8n?>tf&9b<WT@Jz&*3gVc+*UI5G4S>LwO)O<Q5U0}
z+<&f|6du06J@syB?d0VZ2*@7&J9qn+wXThV%e`$0TSQY=NooBOD_~j!*ZDP%gM9_P
z_wTkC&%B&emeFnj%jTtRX705trrch_i#4%$DIBPP1zWVxPqBn8aD7TJL)oLRKS4UE
zH9-;uwyYw$z~3dq*K@F40d>>@9LQUx%KIxhbP|6=@#2a{N<j<M$^=$nS5dl&<+c5l
zD}o4Wi)z);1XZP0(iSK7B}r%0L1R!=jPCTCe6~jwpO4GB^1}~5D1*x7*Y}2p`m*vL
z5#bGx^=$ANeE`@f;iI8k>>bLmMxQn3*fwuS+L~@5wGP>iZV-bG_>_W<zJ**ops?>g
z>Ef=a;HA2S-lp~4=km8>#lm}>kAe2cS~%ctTthq0D!D_iT9NU>Pr67pp)n_sQxtOy
z`S&)syMefX>gN8FG2}~{c3M+5g-K?Col($Dc@i0xrqLpUoWJtc6~_YrE6SbSr+ayv
z>X(k`$Jw{Mh!wQL)R4qT`Vj~dN^$#Wc1_&!@vB?E%xxZS-#)%wIxbBfZr?b%T{)V{
zMKYgY&&M-Yt<JY<$B*wFjbG!Pzr@aOJUusNluM?j!O7^^(PP=LsT*3`MIZHYO6%#$
zWX*2lbgiQ%RX5`QJ(%wCao7xK_~#)YeUr&n;Y_8T?N1FwYt?n>lL$>CUMDx=^s7B#
zZDp;E<(Q~i+YjhwCJV9bJ}$g61}TczWy>VpmoZYD;VJDkPW>m*aC}a+IMGu&*wI$d
zXP^rb<-Z`0rUH7ttVSe!vby#oRR|o$O+y{ZFZZ_?%;u!lQFXP*5muB`!ie<_1)1sS
zA$9SG)D_|(dAO)>zh<jU=V4?WeG+X-sx+#ISyNN9PQrj-*Iu6}e<E_G-v#t%ncQDp
zZOdK7ii3@WKB4z>T&RZM1>kgDyvC&H2uVFu*96k{&Yr-IbKpt{Lhn`Ztdqa6GcbKz
zB>$KIJ#gf1JKE!5Z(?tx_&<pdZ>IQ75@psuE0#|N`fJ!R;8lN)t$$pZvLe9)J&-BP
z8;FUoFTBsKy&t(HPrlVQwt7w|dj)ZJQmAvQooR;)R@8Ga^OiSAIn+YqtXy~oss{T!
zdjo{$P#>eH;{paEueN3EvN(K$EyLdM+&N_0ts|IE%)P4-IKBQ00%vui)r;xMoBO5_
zHwJ+)rpHt->9JuT;SJC8h1<`k{oVGDZ%wsZP4pTH?ubUmEzryTwKr)6uI}C9fvcQg
zlz4H+Sv@?FLr&ZmyB*!b0agFP4m|BP%(&pf4dIFHiU+lj@G`>XMLHXb)Yb~{40hD^
ziN2gem5H$Jg21C2L>22#N=CJ1sL3FD(i`*$T(B0e>p1=7lg?_^LYVM*K;zuhFApBc
z5SdXVBDFjIv3mGuB#_kXUw&H^#}2Ln4aba2$7J=zwWO9AG|axPJ~J*&E1FK}F|F&b
zsgN3igO5~a=LeF~0}kj(wa3)XMA+rwa7?IX&H8;9O}V)HXR88|)qSNWvwsbed9jk>
z!{}y-!cRO*a!#r@C(+m{NR3O45b`M>6U65hXK`{;FYhl;l9RgA%Up6a5KzAtLg}26
zPMOWQ0fJcqr^X?leh;}#5*Z6832X~e>ZkF3J;@%hsm5%6ES2~Y0y_Xv^-C>{HVu+a
z*~1NN1u`n1S+{q!kJ!2?5R6SX?-e+72MefVxGf2xpzg=;<RipdkcG%F5DbCyS<tNc
z(?ZNY$ec70hc|B><Ze0T&*Spk@y)Ry*^*Bq2!h;;cfVHh=O$|P?VaD>=sbWS1~{2n
z5nMr#w_<56SX``e3q2S9>b_?DwlZaza08KTVj3%r$#Avunc+^UI~E;{=W#D6!WFiH
z&ht(-oz;NnZrH<knPA*P<f?E$oxxfgND<R<M<6MY%<d(W6$_NcfqQ>0ff=7kkzkOY
z6J~rKXlG?{2y{6>c~Mh_fvIIQKCC8nrfemI6KFPg0dslrONcRv=_+jK|I6OHHMfmq
z>7suH(Jws{VV)Cv?{i)!c2D<oPsQ|1cZ|EcBKFG!L_!iQUevruB|S;h)s|GMDw~!q
z%O$H!OVpJ#WlEGSQxCLXtF0Z;XR7xTD`eTOvK;#_oLp;VCIAv7kf5q!I%Y>y$s~Zx
z1i(aMt@W+%Tf#Q<xq_gT_^?UOqym;ACda)h3zLOy^z1AxpC?=HA=l-wEWXBG0gXCt
z{m~aP=E&ybb)&saIx@`)!cw!U1l&M#k7pLHHy&s4JJ<BkL?WWhRXl2x9(vkzUo;tU
z6%ETrKt5oZ7oAuXLK&WSikoqCv)(RGl0E0W=5RAkwj+DarJX%Q&0qSKRiGhFz86vj
z;PwA=w&^!l>7=i)ec@;(0@Ag=88qU*{^yUd&fV32`RCs%=5mPUR((3?gj*2o4Lys7
z!i;11%d{_WU~Vwv8|xF8oWcGc;+6+NazmV_dKqSkh9&nLBCCz{ZHm2J(X3mzh#h44
zE#!$VGjL(5NhJT880}B$kT+tF#ewb;nmwEkw#k+s=e!ttv0$X*FYZ~>LUmGrsdP2m
zA2-u!w!TH9Y~{-&wKm3+b8y3qr^5Zry{xYS`A(igMty36(e)9{|53yyI~ZUtPlgF)
zZN%^+G`J%5?j!m1vElog%{LS|bq+j&oT{X04Q!-UFB^<L=k)E$_G7ze>^i57t!)Z$
z+9s_mMn{v@R+x@Pt|0ZVpw7&5UPn-ql9FVQ;0HUlv4<R#xLV<QsFKDfFNq4+(e*$o
zLJiXp*uiBD_&bkg6*+rpYc{a=x0nnLE7oNWrV`dfP@M;0^ENN++S&Nl+4$z!*tp})
za-$d$a+>X8h`*AYC~VA~WjA=S=3pr}^8ON^jW}%k;)MY$yDZg`J0i`{84;gd$9OC|
zcKS-wdnBkyUarZibz5Rz4!6{>f}KjH2^jF=(%zRA@p3@X^TXoKhg{)9?#{*1j`Orz
z41Lf6{V-Yj@aY{YWv7-8#0O_{`pKtcyd$Ts>2sFyc}N>`U+j<(<Z1K@EPkBhj6<zX
zMFpLT^+tvDTk_>gu!Ck=MMoP?wwo&GDW({;EE7&oq&cG9$qkX!?JBrw-L}f*r;+t{
zB31>;Ose6btp7voX_XW?6|<C_7ZAk9(HuNFIHHD`O%tLV-UHi}$m)4?C4vFy?0|TG
zFg69@^EQH?YC4qj`qX?YQrqNenvKFX72301jjXOmS3?O?aFCkG1b!MV(Z;ouC5>tG
z*>s1`=3K6awe(0#o|CD%Sabib$zZTMdr6zK=Vq<MBVD{J&^f##rTiwD-*a?2-l~6z
zs6mUz$owGZ8L?eYnS1mpa`+bIX}zkdo>$uT4s$PRN58p4%aRmb*yq*eS4CI8D%(0E
zO5p8@vUvfRcFt1@=)|(EGaRenJGN#0JlmtuR56ck7N&;b$jq;XeOv6WYGT-Q%Vcp;
ze1z%J-gj=AGmvKX6ghdvuS)U~uymf{tl>Vl<3=~R-fwdNjQsE8`SYoR*VWzg`Kj~I
zw&kGbU(Dl}K@!ZGN3}cdub<8FQRk=L%<ahW-p$T*9`8p1WF$u{Zlpu0c+4jt)6y)2
zH5?QhC)lls#>@~ij}sYX9-V02uO}nJx*Nvgt2u|0%(%}IcP$+-M|;SOs1pc9d;H<T
zK~F!G=uLNV^{I3s$&LunIu#w1qK3+ihf^^<F6;6sHBTq{LW4%1p311+a2%A4sfyw8
zdlj&mnBAdBLK!_2UG(Zcv;AC~b~CbgxVq^hn&>mbdbHP!`kZGSyXfXjtY^KOUIr7x
zM$(SLUP?FJoKe!yqcNX)x9wVYGd^F!y6S?jV_o$@i&-~aq@fg3SB-;?PD`T7ifT)1
z=!u%KP#FHlB~`glEbkyT$QF#$P(ry-tgStN#=7Z&ox*?Xvi<+lMSF0!V#<@i)&cVt
zd%JAU7QA2`^y%sTpc9DNN`od?C(J43Y|MXCQ4MeICe<763D`1brb7KaKBcakTMzdb
zF;^QwEnz2R;)7_}SIwC^`~dB$IKV3>UBXKzq-NOyIOPaAqPmZi%6-^a?+ATTN_vSW
zgF~NXOCZxJ5t(hQFA3)Kk!|db`D`)~{f8R@K(F6K5>d!EuRo0@Ue?T)Yv~vFyt8`5
zPjq$AFvI=c0@6Q=#`Lfer($^!n?bFmc;{?sK!pAwHOQkoY&PH&(vtN}KD>9jITf9|
zkAKVfcS>S21h`~T5|Jp#N8WimC6U}aCSa8!At4kg-NjF9q)_ISnWHq30i954APKxT
z5DX|&xSyi}901tC)a?c0@g5+0aGTfi@=m~cpMCOux%44dxR`vbLHhRe1EA?{jQPpf
zu8d*edQ7C_6kCPvvW+LZq83%wZyNS}!kqB$luZGghnnZaFSz2JaJ_}G+%GYfy8;EB
zAiA5SxNfzvlf}?3g?bMgI~~^wky;J()q9vaLkTpCls&CA3cDDwUBjGe_Mvlc>>mC@
zR8OTx!lQAhy*X-x`xC<>!;~hY`^=^O88e_q;hDro@|RKLllm?a$2@XkP(bSR^H&EV
zBW4Pto6o5R(CPJ874IzPQlmJA%3ZTq73U#^ABi7I(qMZ|N~sk(^v-6c)MW_Nm3b&n
zBq__-V3}fro~bQ#t+!ttVOq=S_~{f<H9fCV&@zvY-i&RW@7_7j`jMdiZu`#p^I6sc
zDxgf3S-W=+@4tP%*EWCU=g|yXMw3QF<L^p)k_sqP|LT`l|EmU`ACJN-CaPd_$hv^n
z<P5%kTr{e=hKB|(9FUCOa(|fyNh@hNfsl9;wmAbkwXAB-9r1WgDkqE7m*IUGuRqtq
z^ds2vKvIcJPa+A*BmD_0X#}oCF(*X+zM-qu?>oRYnnU#4hJK-^4O98IwZD?DfdFnY
zGpI)rRw9RH*o14@0m*Q`Ih07LUISKv5btf!9TB(6%G=;mlVp|+Hdon5MpAhsc-_-v
z+1kbwiW&C8?<9+C%eC0bH@3&Al&qwjRc-kjq@BDk52{Kc8tRy^kyZi}mgQU^@oKWw
zKjh_O9k#?3dXzC4gLrCXp%5W^*t0S|sI=iDRrwN{Wa4KsM@Mz`W)O)LW)xQvE_$!{
z`+)g^SAaPYYnUam?bI*ki57y!c663KIiJ6c+Qh4P*zDQF4klAn113|@Erg_nH8g_S
zYZr4xlLDnzd;w%OW-oy$mBq%C95Ie9WtxS>hU2}hQksN|ZKaqhfhYc9e*WVAu~T>W
zX8z*#6Yb*5S*U;t#JSZS*b7R<HiPWq%c=Ksv+w6rP<ArZR?s;66cY|8B?7Vy*iB>`
zz-F@woVuzEXWL{>K-$E#s7h+(M@TkaQ#Ug0Va?q*R2EoqdHV3fio31o^6+V{%Ep(Q
z<X~g-9SR;_$=&!AXSlO1G9k*X(#1bQ-U4R^f<{YKmh;S^t(&KWg=RxnoOxke@CnOW
zt3+`ZDGI-JGmo`lZ2&Cjq#o`Uxd?n=1Fe_({;=u-=W0~5?)X}ovZoy+x>!ghd9g%#
zt<3grXNE)mfMV}yu}mZMKW7=>D9}PE{$qjY&$i7YAg$gL+**~kJ#rRH;_JA#sjnsg
zBS?vsBrgH|Yc7Vrn01f^FiTrCS4CeSGhYh+-~5v`dlM~y{^=z9HIGoD2gGC?n$acI
z-<A6#-`$&jTg#I_V#o3UTgMyn=Jw3%X9uLi8%Ql?@hdil-FLCTdm3Qt)|<6iQsebN
z@6*Z5k0Th<*PHsUfiU<;G^QFT4adx6GNByT<;_Bn2_4a+iMTQv7*0oAV!}#+*-Xz!
zD4B_;%~)$pmhIEH`khxe7LRRJr1TLBwYOaN*Rok061wmVZ^DH-<sH4^n>NWtuMymY
z7`@tncS9F%lSuRxtjwaCoMo6;x^Q06NvUP%b#fS)gp;!?4thr>OPF=ML*+djnt@({
zM;uIu0Qv|uW<kmivv^&^`TPVH=`LdR8I`*THXQwGE&6!Af`wVk>Ktp9wDSrL{zk~w
zkxZTL`2#ko(s>;&iyAnU<-D57z+V>M@+ZxLZ(fl-x|3~W+B^e!t1VJ&UgyhSnK&%T
zr}>EBkrY~M-wx6l^eA~|DJUNYg{9c*G-~!F^`yzR0so*k6A#nsZKg+*)q<%q*#@uo
zw*F8PQL!1$yxKrq3I2|W#W+hbG(k#pEf>eD#X%!f>ZO8ld}Z#)Dzgm|OnumPN$akc
zB&w%Ty}Fy?0@hdDi>kR*UxUk>6G6hFxURNDv@SSiAVka`PJA=<ytoJP#7%7=ZuTdJ
z;v<?4Q(z#>r+`X%6ZsZ%kjmmvc?}X~%y<eM6z%Vs(PSpl{l);)`e0M;mux&S<?8vg
za(T9otXa+9gp_{`Q<=Jr=n-v1>!E>-u%3dspNSi(piHq84a1HUz4q4&f6N%zG4>3U
zu-*hcOLS$M%EHD~eC6)(B5Fwf(u6b>=o)hb5n_Q<w(X{k<kd#=_z*kHx72duFM4+p
znR1}9DyUdBls)DJk)+zy4i92LoTEgB!Yvt#w8|_2rHezJoFkGGR1uqr;X)b<$oMiq
zDVgSvk%SmAn?a*uVm{CeTq+H=RMPY@6(f<at7GqGLDUkWj-;72aT`8NY@Ll4M47be
z+4!EuEt?in7szaJQkSZzGBKw1@Gr#~K{lB*w01;sG-7da8Lbxjmk2e|QAO<Pps_!O
z!N-EAqh^&)a~NBlI9zf_#R%oa{p0tgI~NbeFYZ@0lJ9z7oqxZ&157Poc!9F>jYk(t
zpK0&s$3Em%=*D)%;le;*t?^_7|3WNK!-(W8NjKxB$Gn!=lYRV9My{2;v{}}!@LaLQ
z8l}i{q4DG{e!knjm%SGG2%FXEhr#90919Z9yp9if!R^`R(0B>xn~xTN&qZO*aS^24
zz!t46FsI#XalDk{m{p8QZtxmtSSqb70+l;LO?n3Zj@j5&E@jhpSDkb(ZeM#)xq`(p
zYiM=z%4Q@)y(8#XC5&lKQ!zBbr$Vw-3ELiES+!YgdIDig+u@we<*@Kw7+b-~SR?$M
z7}d4#=!?54W?^Pt9zy^WLZ9O0JD{0lU|dsKKAVYG>`kArz3E%t?CkaHeElnk-@CJ{
zPUmPm&WZuCu+!@(753^ev+f_Guv-lyWlvSLthbQgl*c${#m0IfCj~;2cWv5g<2L;@
z!LM@EBg0KtHQT$H!!v3nPT4z9kBwfZXeRQT)*2-?1eXZe5rxW1jmKlSalC~EMY3<W
zX^&SZE9G05SlmA=iV9_|M5}S?ziZ~ytE!v;HHey)UhSiH-#izPWAouQOau0pPXl5_
zq0aUJP7hGqreNxkGqS2;V2LD;YW3}{!uivi=U>d9-vHAA)Uxpmi8U1@kc!PE{pq{0
zhi^W8^loOK443nn=Wp{*&!29;o1LK%|HFuHC(-j&)HlY+qM7uWv)a^<IW(ajtjppO
zRaJ>^rot%1+Y18E&gkY<xyNse6P?Rh^gNl@_$7Lc1zs&DQ0UXM!{!n?o~mvy?nR97
zfUP)=S6I}o;LXQoDrJBGiQe1sp4elgcr)v}y;$8qElzsX_7i!fepVXWvyH74bfLZ!
z)vr&O^Q9O>t)O<r3ffX%ayLp8nF$<IwdR_q^zYk^&(C+)!B<fVuU62VSV0T_OYUlb
zuaHxXwtl~V{2_OjneP{~4}3>m56~p9ia;w`*O2U46ru;kpwZaN`Z>rjWya?$W_K<?
zrYB0|^13HMVzw2u^_KAOG|Hpp0taucnj>*NGmseem(wX?xIg#zj0ppa7Xd+J@^lW7
zWA<Sj;-#k(P@|S)a=pX5QveSGnp+VCT+6}&stN43AnUR!#edO+5PGv<V{GH!96`62
z78Yi=p#+6YIWo)TXb*)bEtr&^F=KjU-|dy&<VtV$bH1a;*se-3<hJ4C-vLny)yZN&
zIl(fVdK4qZ)~bT(b(u6$Nm5sECz$8as1fD6Or^49^iEsCcg#Z@jUwj=7k7F^En!2c
zJ|>!M@gTH_B(uL1w%x;Cwy^zm47{FDN-2q;u`j#$m*TR#j@&qoJ}%141r9j3zu229
zmno#lMDe?#h#vdB*i{+0ZhOfkI0yK?g5B-~3$6cWxLB}y{-wT_N+%Q73=FxmXNR62
zG=sDuX%Y?9pARNpETka@4+rA2V?-frFLx=1<N0F?Mw5JQncZYy+<n+)MuCc(q5VCx
zVZ;4#W@s7lD_SZsY#Q<jhL|_(R1VMoqmfJ!1&4X(=&l>rj4)d!l3KXF77b^ji4@(G
z`Wm|{?8}JVT7!io#cly#TSyd+-x^&pe*YWHw;)M3qqo-xr!!E-9m4iHoCuyre|-~f
z6Gromw6(m!hAXfIj!!3bGmR|Z6u(3DLR1M)AbW8M4Z??LBjz!NpL4|UJK2*<*f4Jo
z{yQY(2tL@rzUHntx|MmzS?Zv<H|2o;45<l|Mq9R#Khi9<wnl!5B$-HFmPh(KnN(Al
zhm1AmAh1}lTFFF59`YwlAS?1Bi#Yfyw#`y;#TGTXTPP@ZOhAK5uCfjRDE1&{FwHK7
z&uG-Jzx0fh?e(Y`wpSyYIC#AAiSb96bIwNM8B@%hhNl~$3ic%QB$8P8(X`eV_CVWD
zq2FL95z)*zdyCWiTaK&p9D#ShEDZ7v-W6uw?k$~VC#Ao@w~URyuCAZYF7dCRSvQGJ
z#CG7SkW>!j1hI%QLNAUu+JO1eoqUfn`W`ufJXipS>|vZB3W1NnU*fD$y%EALC~<oq
zOO8O-S!^n6k2v)0t|Qoy<H^@S0gl-_%nka#$s5wX-=Dn5me?2V{r>0gAMEfA7xyQ%
zi<L=$H{LH!eqY&9PPle)oHVRC`4F9@JZ4G57d!fpv!z(KVjCPBDciHPa|l_;jO-%9
z6+-742D)og#cl>Su4At`k$!ChM}najobNyo!!oW0dB;+P&|}diDA#$-qJ{<P)oL=0
zIofO;yv=rkm}MSO5Vm1i!-6b1YEHPvR_oe^sooq8tqElH0YRVFu|*L<m7LV?rc27<
z*0ShuSzfM46vyhFB$GLZmpiA7uq>hT`VgJhymVnLAf=O|yMzt7C|;A49>;Cqe%X4f
z%7QXZ(1pzoxdZ;*71e1J>u{ns&HU(x=t}1u5KF6*eL{YA4k45+{C)!=7hV$-!JF(A
zM=x6l&Xf?0DZ4dhA7IOph1aYiUZcho1DF0^KoIY4WEAA^658;{Kq8L0=ulvk8GEC~
z%SrL37@<Nqn$79g;B8_%GFjgrjwhl<-^(o9s}9`!5^X@63CQ1n^^JN3tS@Qq>44l=
z#-I-GP3zc)i6E9A#v9f!#rX+_+_6k1$8nCC<IU5Xp*Rdw-8mg+jyQ(&tK2bXTc4lK
z!Im=n2P(vjOC9zjJnRI!2T*9ts>MnBuV|1+on>F2Lc{~=qn7ZfXV?cK2yHu8xJ;-3
z^amq3kRE|B=|NH&0;vJ^qqE<1$VfADe*j`UeY)0TB<uNsIP=K$<Q3d6tbwR~mvUj&
zoTwg@M@VY{Kb#0nKugpwhmo0a5wzhLQycxjb}=4Jj3=S3F-r579bReM;|wx@Wd);6
zIxz2~1)PZP@-(0_2Da2FJ;abv-UZ=0z?e~$7lb9W3jnbqA!XBoUXT;;))wJe%`n&%
zm%gjGsfNw+Y#&hxvI15sqJZJ5=y=Ipt6!G4Eg%XP5KCLN7oLTurR}rFYcT<Cc6ysV
z7PIsmHb=+sL9;z6r}OrQtoG`F$Iqw~G3&{y^VHf@9eaCwtMK~ir(^`5e=#otZ)?ob
zef{jw`QnZ9qy2Yl^Rxw6)rXJHrw)SvaxhYD`h4|}b@wJ$J>RXqo1LM69f}stu>t<~
z@w|Oh+UPy1PmGTE5M39K($i60SzxEPH$5DrjY=)rrj{x2$&}Tg`r?_vXhK<6tS_Zh
zxYueJ)nTV<tJt6k;e6#rX(N?Rg<?ZGv{x3Yk{y6#R!qH)SKx7ZyU<`N7VIu7R0(zp
zg9W%s-YnROG9O?4L^E+S?eat_Vb?}DlQh#KLW8Xdzvg?j-_c$#>>4AyJSe|~MMWs3
zyC!3^hY+q6&@)D2DP@{6!+9MOXE@$lONj!@L579I?~bxFGSK}4jsfxIzc)q)jTZ~?
zL@Euz;<yMmdtvKWQAMfWU=tza`6IQ2azP!*=wm-|M2~cA$jtAna3E`xiEHVkk&5dB
zULZUV)xse4O`J&eO>g+UigF1hTe5SlIS}fNy;G86Nx0O9<T*RSh8-dI<~SkVD7*yS
zyHKx;>4Ap1$yynsqq-Mbf0liAmfby@c!ZT9rq8kuWw^~7v#+A0KuLth4oRu0Z3N3x
zplScfMQ)ZyqP#JC%0exP5Yl1eNzFUh;|<um|ArJi*}$Ci8E>4PH2v+AcZgM7$8ZI4
z)qUtMRfT;1<3`~&#Sajk2S0z^3z)Zg@jdJzHw|>hQ|aN3Q^LjRW^)jqK|NmI=-AfX
zS$@pEf>R5w>o`!qaaYGG$>5G&Wo{%aFzM6~Ii}uN{uVl8dEvs<n#kbpUZWp<Chkfd
zk}SKsW8;b3FX8nACSS!Iz|$$p;ya!2qsz2+{d}qb(oATX`+(*<wXoat@>QFbFg;Mc
z`fwY-@N6+kr8V{=01$wvgE9<X<idqk23futR;O3l0q&ROwes@7EkWUu>P$74@?!QX
zVn>2V=Hy3pZ!4$+^WyT?n=k*S_k|NEz06r-Hpp3(7}m8V5e%pQPk8N}%qj%(1`=`h
zC7Q86Xj<fDap=Xp<Vb%a0<Bas++FQ4EXY=0=`oUn$@+0GBtS-(Y48yrvYt;PkpBlG
znNI2%Bc2+rA15Pb-vH~P8<gH|+EKigUnLRg#5zJ)JnCoZJtpSa2Lt3CRvy9gL-kE-
zv4R7_i8vc39yS1iXQxx&#9N@ws;eM?ejvzm3qoqE$}GiY!J4ZA8ooW|0L>S1XI|)b
zo&aJ%oxixz=zN)=%INwO!bhabm6zsdiR8LUh~HFc^9oux<Ag4?K;J$r+in1HhoMX5
z4Eap4-bb(pCQKv~HdTW7(l(BmmR&3iWibpp0~7jYlZK#f{+v8e#<!oGe+H6>XW7Zu
zCu?tS70$9>yveV;D;xuMot^7wa4Ylj_6SBsLmNcBnsMoZNSi%DBYm2{?=3;ZYog%U
zb;Nk<HQYglens<Z@+)H1AEqlGa+`ooU(7vt|5Zu*zC3oZe&3ExJ6!`cjCu>5ffE3$
zp*->iQsjyPcb4e01sJ<~7Ls*W#BP=@0ZCpIwACd+edEt)MH5`N1Kd*)2reZH9}yZ%
zCC#b`^NtD37nQq&)8gsMC<C`sjh)+ozk*w{Mv2Bvz%9VE2grSUjG!u89e0ZOiL_~#
z-3y>~be00Zu`^WZSqOJ=IWG%Q-9^`Gjt*hpR|s}(6qYRrwW9~t4Y1nek^#_~8cgcL
zybKvFKYBEtczG{uYNj@-XL?~zclfDzIioAr{C0-LRjh^tn95fte?zM>;aYq_aC?CH
zmOY)bv*8doJz;-mWVx}^X}~wRvnZWLiI)PJ1Igb{aRW+w088q*b*Lix+2U>z4k}&)
zY<mcSPT0zfHzN+wR=8K}k5XbXxvB-!5u^0-y;daA1#&D1Zr+v+vCN`E8DN^EZ1z~-
zjfg`^_>m*2We1Z_3>V02R>m@CY<ciYf}D3kTU-#xRdI;WHnS<B5lL?Yz3>vb<jvTl
zUzhz-ph$-=uVHyKjz!MG0jQaFtl|8I$Dw?Bn@`-+X@D2mR~Z&Lz0HY#&uuc!Vh<bW
zF|rD8BYhAntpx=j&-dn;p#sr?FXqn|i_B2@ZfpKsaU8HeXW$?R^eAm0);sI^^|NQ+
zZ7#jry{^#vj{z}CS+rDGhw@e&k?6~q5oNV9JwTiOm>!1Y;UItyoN~~1D$;Rwq=>bI
zVlO>LTpbGp_cz{W#==9YEz|LEZ@=<*>A_g&j~S>&9;nC<G!ND=nSj~YEjkv{lj#ui
zeIhN$80^u0&i43cc2Znr&lZ7OZIVh_3&!!ZolGD~r_i~he&}*NU?;pWFk(QxJG6Py
zdh(i)Cfg@!_W3F6(L(H$m+lK$S`eOoNtW`kOYE#EQanlWch4qxv)Uwpv(^>a)7S4o
z=+V*H#Fs?zJj-t5hwEy*Pg0(y<mnPup+*VnoNdz@ZBpWaUfjsP-<bVyqk3`l<o(SR
z-&D{lFKkU|$UI9~q#9RvGIGr!@@%CZTRvnUvgqaiuo8}15SgwdPO=(uij}MdB9IbQ
zlpL&8O^A?yUUFa_slUnBL?6|CK9Fss0GR~_iDYA^*${k(58iN7m0Rx?QFK+N6*g~D
zCyp+N9K4dSfX68fuht|Jd}%~dmz&^|KKkbF({~fk&llGG%MRXs`etf_x9Gqf<x3L6
zQkfJK39+V!rf1UqKu`&YO1ZoiL#?mHDa0Da@#Q$zSYIb<pB-tkDs#1gpjv^d=67uX
zP|0@o>rH3B?)U61(530@?O2K{=+dlc8&n_hul5?6WqJn_nU}Tt?ofRV@E$fZVPcL#
zC9oF@=3ru|{yb)C18nN*F;!^CO~xW(8UWo48}SHi8)A)Y#@NsJgVAdas6HAkwh|Q5
zfPyM?MK<8Z#fVZwaGGr@)@k&ox;%G|-{?a|7@l`_LiGpQU?!|GjEVtP6#z|M04^tZ
zPzl+Ml2i%%wq%)=W(dc!;sOXC`h552ZJeGhqBwH6Mv{8w+7K9<EeA!&Mp}rRzvx_n
z5mE{Fn}guFq*W0bt8V~iehNOSIXViHFN(8*#sQw9f!hq*t3VN>akX0CZ7m>1v-kS>
z37|IP+vmlafNU&-HZTWK#m-%C>`yexUm-Xnlc)p*5kVeOP^=&rvrd~F#<40{P}s_v
zFqZFEFV^pC7ulVQ<xOcitHCY-3~Ag-8C()%VOI1qHd+QN8jqMp5a-0|fPc<Y>&Ldj
zEj1^`L-u$9brSs<?aG*rkLOr&>TWjj`;Gi<EY1Sl<BF59=fHEIk*Jjv^dWBY29~5Z
z&?H^B*4R0gB)knG-bp3I9nv*KsON|p*($KWRySVWp6L5gFm_!>S7WQOxqx5j5TfDb
zB<Lg29GgR;jNV8OB{OUq$<Yz^+KZcr)&QEEh$X@=rtlRXY+-8hXk1^u9F|GgA||S2
zwpQl{S!!95%z0Hkon#IHnE%u4-zswo2wypVRXe?YI)D19o137DwWR9@5yDsW_)t9`
zj{skcz3A!2>DmCA{J5WAc`%g4uZK2HB(b|TBA7Trq9<59W(#qeO@YHW@f8k|<Xib_
z4)0GHI&pOd7u=n}jmG1NMyY~KupGf70D7SBSV7=p3o({CWS0?tYZ<q%y1*!G%ysO2
z9o@_T<gFI5qB72|8f-$Gj^h~ZoKFF=2u>F3@DRa~a&QEMoK$+tOV5?yk3e7tda$xP
za%;hu*uDNA1KkKG643#}C&@#$VrQ+`Vo;C$+u%Sr^<VyrYQu0U9EzH0BNPsskszo>
zI5{$yP7HKe$eZe0h4e_wjJM&XPz9`?!4W1D<H9N7lceHOh?GJDTRgdR{eV{i1t-;w
zkzyB!@<kqTsXQuu;Vn^C)HbTnNouY_;G$&Y729_T7o`T1^|hgltS#yp=f@kZJMMET
z9fW6M{Z`U*<#j?kc4f~n{HgOiWcg1&k$-0~Pg%ix8GXbub1T(3CV8&%fC)qPvVI35
zN2s90Il(%bl!#|Siku!3S1X+@am&2n0INRc+r727pG_-}N!*KDYC?;J9Ha{7w>|8J
zWg-hwYTrK@L$!C`d=r<7ky3U2#8wcToi+FvtIplhCvkN=ymHI@fCtUhRUehn+1SG!
z1;=iyioyX+7)63hbFD_S_<7aF14{eCBgRMp-I>fKG&g(!434^r($GIZL;rx}&V5p1
zXjMrJ8(Q4T#Ihdl@W{1z$A{WIqsm%gWY*MG@5jYGL8`>;onuU7qI?oClg2DXh|@)H
zSVD>@Y?3l}0l$!~OA6t+F32zyt^mM3aKrRbQ(_L=anzc1Q}fA^nQoQwgj5^6FG%aZ
zEq9mg#-mLT+mGHQ)(fN+$~U8iY5=xvWViEv55cQ6)J0;(TW=<+nQ>9A)A=q@1}&6~
z2*>NZsVb5iC;@f`u^~3N0RWxw9@y4<#MQYRR?KDDCQUK@K-bU(!R4>}GQd*%{mUA&
zy<aRqDW|xr)du>J_1(1oVnHSL4tCrx1z6!`(6P~u)1xFG`3q*TrP6HBKDM;RRE9ys
z0z9O!nMi5%r)j8E3#fwL-ttofD(G?6%_QjUC56Bva!sPm#$m)N53eri=wdHc8e2!G
z(O41n;A>RW{W`)BMJ(#RWT%ukcLWe*^k-mN5{(Yjp-Ei~H;Ec8ifF^{L30RyO)$i1
zN?Latt1Qu?stPsG?^%C<{hC_bh(>j-&q$@4?OkPDp~d_i-+o;?IiKCcx=rh73KQi+
za6f{=32jTkw=tMGy`wA&zMp7<^ijftIAuhTJYZAVUco7_3}KVC4F#1hRvug|Zvw4@
zeflt0Wt%_DsQ_jj>~W9~oFXUzu?MuZt>adOw)+gyI@HZ@WjMgS>blD_g?9Re&?0St
z^QavO_K`|y8KD6x)egdf$~b^e8y;&NNgKY{*quY<fQ0*M(wLVul6D(A>mrRP7cA26
z!{$d>J>X?5Q)K@LTNRPfu_UXI9&rskL;`@_(FJE?FgZFB(bH^+7`DG$u9<upGGglC
zeq4%xT7j=F!%Q*P#MZg=DAX4uS&OJe72k)O;V`LBRal31DY;Z4rq%N@%HM;P6(Ovj
za<K-2tHrf<beuABT=mT!#J3{&GE85?-Q86$a;eMi?j_;IEQZ<LL-aXO#b}ltiqdYS
zUEzXV7L(zMW`*Rz!;;Jf|6W&sB1FX_yBGFm<@(!Og}2L-ZwqthPZt9P;#aPp-`IR}
z`}n)D^1HQpySpYO&7k+=_PzhO!e-$_Jf_E$>!M?<J6H~XBok9+4H%yc8ewI9vVJ`o
ziJEa`p|NNx6;7!OeJOJ&VtqzBtt@;s80=;wBWy+}H!^gW`cNKik`V}!t5Z9YhM_zW
zS*wiC3Rt#AaeZ*mt?-pcvym9gCUw#ZK{}~)BFT;jvBwP#R3~*9%H*C;m>ZGSF9ndI
zvxb0U8$9ZtZ3hHEq1xuNqE<%1gUHb~nu7rltZgksJj#B0Yadq$)FDJTu3{WRS45ve
z{%$M8AG!dUenoDl7Rg*LuG9sNw1Yb~yci?8?c}8=?u9h{%Eil8N0lxaOxo|<p^FzA
z5G(@cl=9&U=z*?qFBK~5O7O@B(&O$^Rz&7lMZi3dWR7S+K!>MvEt6O}siX%1ZNPim
zL6By{2N&djfX_9BRCq)BRz854U$JF29ySr1QNm#!BB>m*N`!iL#R)3ZUUdZAF{=sB
zvtNg7ZL>2W&E0?5Lr!=ZJ&=cz8SaNH;?vpi1YeesUfdR0shxhRh^;81DsAzUKj=v_
zkx6OM#BeAIGN&5Ym&1Az1mDFXt@Q#b2Mkq}z^hF_loxs`ge)m4!QLqHLQ;GyAuT6u
zMZ&z_sGaJ9QEe-vBm_cM+Fgu|2ytZRSTxpeqPmt<1FP+uwo|#QZ2>_b210#Wqo@Uf
zn&eOas;>nskbiwy9F5o4w0M1YbRbcGnoJD6oJ|a|Ey=`aKj?AnIuI8zy%ph>KN!hW
zxZgA)27ANy;X2e?KkhU84DBCzk2b3&^vfU*4A?vPJVL>uWSF#@FnG=_ANgpq&prCJ
zgcdi4l<E_KS@a9mCYiE5(sC5Cm124tRKD0sfTN^v5Iqx6(To0WXqbXCMrKq<7+6@>
z3TkI@;N{598^JPLGgJo4CADW;NQKQHbW;)SazUiLNDqkqvUBAL9eenuFNjxuUZji{
zu|A6<>6{9;6x*76Z0oiyF=P)1qt-b^>;`6t0R^LyI;ebw)0w1BWv{>&LhMG)6Fo{(
zB$|L<GNm%<!K9S47?PJHiet?}Q;SmXA?Q7!4Y5;G1a{8z%WpRBoQ-e2e*Wp%#Ln5o
zU8qJiu_*Y?>u1@Gv+P$Q<tc~9uw4(%CW`POTMUfz93c)<5gd_mP3U;-hmcLO%Bdvf
zbz+5N1!z+eg{TfXrBH42WA|sEH(rmbA`Z=U1N<!QykFhXE*^|s%-*@kk7*xrci!Jz
zxwu!jxcx+=UVXSV`ToJ|#g~&8_vSy0t5=_zLUgA>bPx>-M$Lt=bX3J&J?f~HX95?>
z4g$-&D0lfX&)GrS0!5<M%+Sx`h;jt?Vpq!8QS%d@!^eFr<o{uxk`?pXx(1z|di+}r
zf-R8@*e?DhzXAXvs^PRp-$R0lg5{V*rr42#hO+y(u{+br8(}-PIc5b1?xE;dNtAIC
zwVl*Ks0wL*h2$sZ$K1hBt0B<-0*_{egv8eApCd^~!8S9fHO5Z7u!p6a3b<)Se9aPz
zWgFGSW{{Xt!Pbh18G8)=wZ8xIm@Rp6Gc}MHOpKZd%yENtcYv+CBig9tPw8M{jJ~*=
z4gyxuOBw1cLEp5b<7>Ie?Q&WG<|hfTd(><Ibju!KJ)~z;-o+~OJPE#<1SFn<>=OGl
zr?D?hZk*5;tKpGPY^9YdAj$Ncq?W2|kf$ip1Rdc)&tFv(F)z9|aP8ayOtIGzXo(Tx
z6|Fz&V=uY-qyGA%WQ4tfRG>Wa7c1x=lEfF0PXNHeTt3+J7-htGt?CfcUaUhZ7Hg$=
zL!|!6qDgLkC*=t89)w7lr3@Av8>^sWN`E&{zd%+h0ywJ@l8J?wHCQBpup=keWgM}x
z8QRhgT4QTNX<L*=njy~^mVG#)B!*m|;Z&(D4>%JAm{BDJ$g%{TQPJ&Z?EmMaPKWdB
zbe<qK)(R&V`&)ap|EdPaTeHuNT|7hZ?YY<(2x0qd^6ka~vsm6f-FUmU^ZMBVhuYSr
z-+Xxx1Yv_73wx}`^T&CB>ehC?+rH(i(;pXD>t)N45z>K^o(kzv^>RK*!!QS9$}p5@
z7wU<HL&JJH+#gBw1?~j?M2xf%PAhX_plJ{w2)H!;>J?~y!<v2c+kgH@wO#vLq=J&9
zp|muVln)Ij%`}GIoG8if!~xpJ;s7s{L}t?#F%vT<q7El5a^Y~$vR{-$CWLx+3mrbf
zO;WDBg#Y;(iw94BoZ%q8!Ef<EeNBrb29QglhdqnPP%9`Zp3<&{`;&&*12AM~tmPkI
zbphK$OV>BU@Vk0JEdN^HjObxp(rB*yG2;Vc$)orK#RGDr=gpk@+d=}OfLh%hWr8Hk
zl0#C(7~0cgn!T#47r=5AS5mxKy2GAK9=JK8f!EDJz3Lld@78X<xjpmdUdPON@<#Jk
zbz|;h@+!$YB9D;jEHC-MRKBm4E@mH~gZSm=z5tb)NXN^edq%7pHafUoC)@#RE;anv
zLA?<I0wqurAkhGuqvj2~<gFsR++!M}8od5~QgV%oj!LvV-;S5IE#s%#zPK@Yq_{J`
zVhqc{juFxQ1eJ>OUKq<S<YRhGPOj0opV}}#PnuR<8ouk({+>m&>Azz3&J^0_qFibT
zY#y-9pV~Md$UpB)cLA&Wcg6sCJ=UTFPq{brl1D4}iOdQ^yLnugyIX=Y4n&zx;W(Zo
zX4Ufah00I?%^cdEc{EUSRGgE2zfGB6o{$W`UCh3O3YV3Z8)df~{eUwYpR))4eERO|
z=PQ0>x^QuLdssLxZnp(zekTjZ@b|AK-^lED30uF5GAiT$Exf+XtAwnVHNlm=?E$lN
zV%G*hUAlYO;#mXQaN7f!aG~M`LstGu&ew%YtdI6&#2g!_Z`v&mMZAPhT_maKHXDTZ
z4wgw$`-OdNF;vC09DHOH^^nhm%oQ<OOqyV|P>_+5WY&(!Old6xSr+!^&$51$VG<W(
zXx)a7)C3H&!jEu@W?6PqYsHgpYMZ#2R_nIVs4P{#!jHg=3A-$ocxSswWnEfn6P&Z`
zz#jISxcwgCAf-%1nLlanMNF%6;%##SP_=%E%^rlxCG79`;@SA>S$6zvd{5H31LGNO
zEy#6|sFN4cRx^$AN>f1+G&3zK%XlXei#t195yjv}>ieDQ#pKHSN5>koRWBwVf5_co
zdt`AEB#;N;ulTZPz0KLYC}&c{E+4DPUU}3^RK_jZn#uOZ^D_h*nx9R20FRtHSj0_#
zK#zEL<x4sl-|NEuE~6z5c*5;Yl2$d;#wq|C1`3#fXcm{sQpX!s5f!l6^96Pig|)`!
zJR0}eCfq<8`VTF6t5wc*>4M@CNoa#+3ULDKt$NM090cQ9cNIwR4Je$3i0uVM9z}rn
zpF5GB%La|3Y|s;n){<>;KWeXz+PELeMMVB!FCC(%(^_gEX%2yV<>I9%$NxQ?79y}z
zFsaNTTGiyixJy)BQkjpoUe6XPNXfDvIah_M5|8+!9+gy~axdPY8nAC8EFk6y&YK#}
zBA)q{b$DL2(<Rqg!gepI*Wft5&0H|@#o$f5_D~X3f`sLvq(^O<yfpdxhH$@Ur#{2-
z{D6RQ`IsJ$=;=gq<R^c}p7x$ZVjz@?n(TD^g^S&qr$?ej=#!Ms__j+A3hbG7twV#6
z-k?4FzaelfFgrYO2Z8TF+V~{Yn*eB}-{?si!$CSQ<GqO>?I05;XuD827K--yWcs)?
zA7HDqseZ>r^Qg`~DYs@1Yg+wIVXIK!@qDw&!ijjq<Y<h>HfKQMfcdRMW<1Q+bNV+F
z8R}A4ZOBZfGkVk<#TQ>F95r3IEPrqur2U*9<b~AiiaNibhymHQ5R@p3Xb|aZ95&V;
zY9hoz`xZrvV$~b9*|nv9F5@VxB9GKQ$^ivDY@_g!wY~=8QJEiMEQqLBHyd<6cC`V|
zyN&6kyj99Hl9_m4D&+kmK?sD7!fX(0CSdTx3%Q||_J%3X2(QfRq)f|Y{98n{W(q?Q
zwZ_(LW9vS~D-I}4lj^O~z9I%AfLP;=?Z`x_Wu)a#q5Hy|6N))l+FF!HCHFDMsAK04
zLU6fh-q#M{0ddbv>t>HqM8FFQdi`wg&BXrs{B6iroY*|e=FhTUolWffD&rz^2A9bJ
zNB?{lefk-wGm3t^CRD6xUzXArJ%OUq*u}*7hr745593eX-#@;%|K#F!^}~&hcon(W
z<VMT1JgFnSAQA*;k)tEUAb1iWxz1(ur&V}4_W7_N#2S3fibd~PLH_GBCe@)d!|s?}
zm6>X1k^;k`DA3ulLy_nPS!c^w=(3C{Y_piAc8qChRrr8<3pv|va6-KRrhlS&IBY%f
z*Gxa?k~QzGMGBomPiScwYe1rDjzY5_BFn7x5^g>eKAjh&Bo{f#;__Y@$vfL?6sJKs
zfc!G<LzTD;0s2~+kR@2<ySdu1EdIDp>IHs8*m(ZZGM572PSI?mw1R%uA-Ytg5e!Gr
zzAPyR`=9|qQNmmNK0>)daUU`1NqRk=Xw$p9%WvjZ*@AMu_tl%(!?)W-ADGj-yZ7GQ
z+!2iqnj6{O7@8}DJ(=-TT8~CiNID)NS`d73+8%TmGiuZ+Ldu2)|AH@NNn}NQY8qKW
z;|eSxOQp?OU}j^Yv7T>h+5|wc)a(!EwOOPa8o%m(^*22jISwT>XEhg>%XQj4!Jx4J
zOcxE-A1%!$A`|r7Fmp`PCHrW|X&r|9#bZ<7RRq|?6FQnnY9G-PO3z`2J)=;-MvtaN
z0h<=<5*x%vl_59&3Q36q$9REBZNwoS#;?ev6y_$oR1tm?SBdx=Ux<vPW~8;g`X;bl
zSW|1|X*KDBu$DSlw=+%FZ2>^H?X2K1-ITao6JZG?<TQcaakOsAb}$U}NYuY^JJ<p3
zx!cqMc~8Eq?CXdx)DS&DZZzm-Wi4#4iZ+%D{oz(8MpO$h=wi$(!h_ZXl@B0HP{T1n
zjZci&AluyKgt55<R9HuhM<Ai$(7lTqnn}-`oByDQBwFso#Fs$fXO6^p^*2|~vR{B_
z!M*{fe^$VWONcSv5VCn#c1NI&Z$5qW_F&n~D^bH9JNqcyL8)4hBS4cC;lCh$XhS3a
zrcvgNkTX>kJS;i_zWy!iZHf-IL<j(*+`^u#5~)0r$fYY)%W<rW|AgZQgg2#Y1qFX@
z<UdS4fl@$^jyXXKaL)Y)+Qnr4;^>YO^lb6P)WFYxp4p82(ju;4-|nDYb!-MQf7Wr!
zoYE!h|L`(Ja5h%4BJ>NhO@|l#0IjZ$W!D@r1)uW{iU$^``jA}k^0>Gi;@1a!WyG>W
zKppEy9}`FA#FFcd*}Ay8!Kq~q7uZe=tBU$UBu#kY0?h6bbjy#cVPhbsEh5O9?L;%{
zSm4Or`|gf_X8Gk)4%Zy~a)<D_P}aZ{&>8$v#lu&jo$c@`NE$`5TC*evIrt^|#PMY5
z@n>;hH9J#=0GMS-ubjj%;>xCzjwbBSYkJId1`E`I<~D|(#ZV*btp1gY-G?CtRoK}U
zaR6CG4kk{>$~HhHLlRKcot$>Mz;+*^xW);z6^eWv!WW8=G@1JB#gCY@X*a<W%&wPg
zHK@Kc<cxff!OFfUBh?iDcr5Vbvf!C<)R}hdN%om{+~m4YW-KARZ5L<cWtphO9)38n
zh-z+7x33^fSNt}=!I>gr;RQiUlM*g9+K5}Qu>i=c7{syD<F=ZrIN57A5!7C!@2w0u
z_jWD;qhJg<TNH^s5A&~{0z2-+sXQr_Cd-aT=9wLZFS&B$TEkKnxAPe5mf3`Ba4dBK
z9pLIuKRw^O{_f$_`4{tKn#!2g>!))ZV;8}#;MhHmao;uu4}rlLwK*}yL8mc(Y$(4k
zo`@Ntp2Q~saC}&3LVPU>)>ui%uetG%X$<>qD~nWN%XC6nJ~L|e^ktZf)teF}rMhb$
zasj}*z+(H^a4-`RNe?{sA8iL-A|`tzwr+_wyJ8D#+_7*``|urv&W<}o@SKCL+Xr1?
zUsb_Veh)`G-x^l{)!$vkJaSVU830EW!x)+5<%6ls;ie-I^0BHU^MkfJ-deR_&ym==
zg`Nj#WUP2m>eAM|fE(EjMmn*GVT{i)fbkiIB@Wo<%R@?!aO;*83!sDWs{&-2yTg3+
z4-3cdOSiyTe=zogp{7T;Lsq$x?JK;2*mBu8YU@-G2X(G|<^iZ43IC2f!d>xog?;sg
zRi_(;U38wRBA>TJ$+}0vks}wZ;(%NWoqNV)-3VJ~!n!iBYM=M&*y>GYSDj$d*fM$}
zwbQYH7^?zYdgKtDkkan3arp<pN{?{smd#2&frId?Cun+;8KL|JkvWPt=rX`lpRe9#
z__SR!yYYB621rFJtqmqpXbd*PrTq!E)9y6}P*wy&T|ppPdD8K%3QfOVfo*c=OU-oP
z5St6+CJzi;)oBN?Sm-53z$-hUIU1|;$*UP6(5J^$9B_0|nKfrziy85kYs^B4C0^D*
zeozEkjAn?hKY>5BnKq)T9J+ss8qjU5e$YpyoE$hG+h?R&pgkB%RdevyR;)-W4Z`ef
zwT84<eJ!O88h^|f^~X?}8>hA+rsued$WQz0Pu!BRfP*y{&A@1I^W<}+jOZ1(r~v%6
zYCIL}QUe-VSOlvTHC3yu2;@B8b)gl`Qz;@(hFTSIvMQlt_YiAarLeMJ6K;=_^Pr!a
zEJEo5(J8`RA2|>%K-6M7pDV2fAOFeBsG-P{TNyhqEq-_Rv$OF%n^Aa=qgvy8ZXj1c
z&RkBQLWcx05lU5Sls9ZD*901Il*q4*IgNC?6I92Zq2f{P5G&x>siRBpZ#=>}PnDI6
z>3R0);{F}YW}MzZ(CJ1MnROgw+I&AZ`+hF3Os8A4VL=r{RRE&-6p@v2n_X%z$y#p0
zXNZ!wAut{i11xIYn8b4}h?577Rc`7=84;?A=Z`GmyzTk4;o!T1AiF^Ffoe8)5g*(=
z`QXmj9$Yreb-}JZP<wpIBquQC5V4ze!O_b%cJ_#NNAiJWkMH1TcK$7>&=J<DIwT;*
z1@sB53AmvliZa(IWHtLX!eW@w&mO?`=Kr3M0o=je1v_H1ikmFNCIFM~S}blF*HWW8
zyz`Hv=CB=M(fSgp!9+w)j_Tb)Ewsj3Iv}mDV%o(l5aw{e8jA{miulS3)u_S{?z7W*
z&_i<rSW4RI6f?NTDO!9P&<Qyy%jl2TOGp)kG{`0a8sxVu6O^&cVGlgeFSJoXN}ohD
ztU^`}_#_|^Z^J4TRa&C>{<6dzsGwAff7He~ni6cj4cx(H`CI`Bl?NVur~B-eI^wLl
zS@;PLHPqZcz~2vk&nXRlU;cxGY@;*2Z^IGVI9%${e|o{cU7oZe{?%u1R-O_b5UfBI
z^mzW|9Cj)~{H)W>RRRD!YG3`>A^xb@1CO(Eko}KFEE5_Sim8UWMZ0!^@-D*MgUpcc
zO(bI;LGItm!!9wzy~Xo3Vbdx~%aez=ciGG%Igm_A#o^X{4HqaCwu=$n0_oyc>Zl^+
z8b^m8&hwW8FA@&aaf|?AbZBfqV?|m{5K7V(PoIQMt0*KtC)#(suWC{BEuNPHn^sW=
z`nqV@aTLg6#CvYE+)IBfrQe#YvG^gn3d1i!QsBe{dRe)%>?R4XUtSn~OH|CQ*|P1n
zg&z9?$)+pB<8RHs|8o1oVn?8$t1$eQ$T6$geGU{YXTC^B$*~~OcR7Wo_!P2K$U9CU
zkLdfBEum@^G`_j0cgH?GLhpNMXfJz(*u`5wJ;v@_T~uT2tDySJjF(9iA~^17%PO*%
z2K}s$3X%B<5tiTQ$uvAfUlURKy~_^HZ}GeourXIf#>_U!O?U?9-R|tA@S2YH&lfvF
zHZ7#4))sbC@3+as5J)3vI#89;MzR-zwZL!E!(k(p67;5UBA!kri0&lUP9vgWsz6E}
zqMLCed0A8^_P(DI3UMF@=O8j|h$cCQj2;qKq>SH=EuOtW^^+vYsf-c#GR4{#(U7R&
z2(s$Z`UG}0fy#7j%L9AY>l7p>nhe(M`60MZ*gfP6)UZqwT#XgHcduA@6ZgQkC8;zh
z?icj*kl%#$rn*tY{Sow_n;I{L1gB&xLxnw9XXU~rTEv~JG^&j=#E0Lei(vb%V>(1x
zoWX|UT907hABbD^e{QM@s`J}KYG5Rtxn?v~jv9P<x4xT7X9hK0OAJNpYmxdI?8DWe
zz7{1O7ldhE7TF;KY;`cMvDM*aQ4#O?wQ2Mk$uuH6pqLmp)9F+ixg#W`XvDSuk?BjC
zy}iitikOD>Pkg!JR-%FQs9)9dtQ{WnbFo$_VnS@W984s`;6Ji{bg^WPQUWk6nGpks
zUwpaLlUjeKzL^>{V2P#8M%;)bfMjF&0+EEul8O3@y}d~T1sAPF8_8hNpR$vv7gE3l
zV^@#oQGXIOI3sMxOtaImOlSCFq;V%dw~W&A?cr!>2>2AyrzjI&>qwNQBAjVX(!DI=
zSI$P>nn6seWIwQgT(BZi$@0A1l?RoqB2cF`)Ug?BL^xy{$F0idTS#VOCozs+py3+r
zxySywWO^AC(_vQb5}Ld`zHL#)a)Qe+gQN=D^k@vt;`|Zz$NpQk8c@xyqC*_dsYxkl
zW>fqaIA9xhkboWxl_#K$4WPh*LSafMvNcFX2Ai*Px|$@dDxMgFSLtHQ#h{tCJsixr
z1I}0=+X8EKC6XYkYS^FFaM;v2H;s&(gvKlxGb)W72r!%0n3{p#(bTa!j%n4ZUN*gp
zGRNt%lV{m|B$rK}P24yeuL9RBTRh7?g98Lf?R@s&ytdXvQWJRJ2|A!Ts0ap?zm7me
zbMdp+wR_mS77PLO#ax@Rz&z4mcM{f`iDo%LAe%*av1HZyayBC9v5dQOoQ;N}osIn)
zNIF}<He8N#Pbhol?X8O~lAl%C;tSljPG73{Wy>l+CXr6)snT?6U!ZL0l8quK#)kd^
zz?I~rzyEAS`@TF+(B#G4G40}6;ltGY#r-GmCub3=yj{7NMCg+9<JkTi)%R<8Cq?e!
ze)Yp->BFaxfyWNkE*`9WzpqkQNGT4$3!7<;Crd4mql04Rlm^Wb?3rym*=?u)+0@_)
z7FuJ6J5W+jxltIXfZrwgEzj$yH7oJEw%=>QYG*78opp1zZ8h1dbT<mJ5?pil6&1Rq
z|JVrDUwgDbYdBHs9wAY#;_}AeG&tFxlbA>P#Rr^*=l=5G<U3^IQF8)#3H@EC?#4xG
zBx|R_z;<WcO$mM-aP1uIxeZ>uZRa5db85V@o8-am+~uX&kcc-WD#R^uR2M72S-oMo
zxZTwjz1D|bbi5@5e^+t;A`K+EfK~_r?@kDK_<@W!l~4q>Mw~!7!Xr-WyEGEt=DkLN
zSrvsPSt#2<&dXt(Y87$d0@aYCj`9@8YCRH)$jy;tEGO^HSbGZFB;|vp>ezs=gMFxB
zGV126GRpi5=0xbp@Tho$8`n}G9Ge*(Ne!68Ts$_FesOm&qnYSwu(vp8|H+6Nd>>&R
zNnhgSJ)j7ZN5Wq3_5I-h60^*%Fu<&c>2?#GyZ~lWAPdm)0@x>J0fC|KH2Z^^HW<FU
zgPNegRGNHMd3AI;E>X!R1Wp455JFFF$bimZAE4c*b3~FlepNc1MJSxJrm7kW*W!4u
zj-W7vU{op61Am1w6=|(6S<lB)De&bi_1b|%jNQ)qqcl<lQ|S?QH*Fk29ZLV950}IP
zdW`=1BM>rB$$s-2bF`%aJF4292;JI(4W-{~;_$U4ALdX}cV8S^7FF`7-MDRZ4*5@E
ziM(-sbjO}$?L4bq-?%NApzPZJ7j!x7(+swl#SUpahp4YJZGay^*#{u=d&#UtUUW~x
zEP`o^spw=UHd79Bf<ZWo^uv;f;PS(Z7`&Aw_;|ONqCgo2eh7_49K$Xx?80d*iXE%s
zEVglos)!@=Gn&GK)}7rH0*DRY$Lf8h(_7l<Z9l8R=|pg*Ba{)ue&%sUJNly>pxb3b
zq(`7n^Y&KZ&HYF3_D;TA+B^SZ{>`Tw%r~O&INNx>_Z8cCzISwfxA1m(^3A7@&KDmB
z%ACN#UO%0FT^&0gn|u9i?z{V6y?*xSeD}`z;%(c&mUP(p(~Y;=ML1^hhP9~w)#q05
zc2Yw%2>)@o-=`vNb45jH9SJF&x_cKfc6ZINp6aR-fVOny5{L2zn#%bpqUkk#NDtaJ
z6^|)XNC%B%G<0<&75angZSNv2h9}x|%!&CO9dlU;F@*MXs=ainX)*Yr<X(s9vJ+y0
zj^&mL6Bi|qlv80mf3KFRMCyq!yx!0co&@744|I81FI3v6=0Oi^DdeM^<bsuYy320i
z<#sMFoyEN>j{p1>s&(^&C9il?qPgtk6-O<ZF0xfR6@%#_VTDr4*S$HRvVe<f#)`c*
zOMsM>mbxpKm9R2IrBp5{UB$h+$))C}xK}Ch%uH!hmgI7>Q?U4lQg%vf3%jvwReDVH
z_C^8!<tv+J#+dy<qPqsnCgqfqsmA(t$o5t(*tnq9tJlgznL%@brw{m5(f#3zK(@UM
zHw&WqGCJwQ{=mf`|H{tWr6?qqu1)kL_0gBbrnHBMnHqqNTGDt~Gh-Pse*=|^1TEX&
zQ2S7mgsseOh-)9g6QRm+Z7mjw>YSYJiVs;_K6B6wGbX}mJ<?6R$fzzPK()wEafp<4
z9Vx9pkxVg{uTSY_k*#+VpnHpsTIj@CCDeb9ib%Ff(-tnbaOCF#QYVw3;7(da-W)26
z_!o!0)T)tD!}@9H=SMcRTy*1FED=d)dM2F!HKs1=NIFDd*({);#7IVf=LPK8^YN88
zyI;Mo?n;iVO$v7JY+~jt`w$WGB7)~@2$*Bl$(swWpRY6n>|NKMoQA}0p21xfxc3SE
zEsHE~_FFR(8n!wLRAQ0Iw%~Y+TC2decd$hjKOCW9zK(GA`eoIpyttG9aBC8USgV{^
zs{u}3tX^!4aS>NUth=gIc@u$Eym8!gw=8x~L*J5M-9vkSg<x4ijSF%U?w+<`x<5jl
zN|$w@NfUKXP}<uGpz>v)1%U0MMCBBwyO#x?S#Tk@wiPM;Axl(F1CnS7kruR8d{e_c
z$Z_4g=PomPsl_TCQlj|ox5<dcN8q!TPLA}LL6)#~<tv4nD6Z7<#T=9CvieA!YFGkF
z$hOpw{CJFi_g~diA;j9N$9VK%Aehim*s@hKl)W=__NB*+Lh_iY^(O0&;sc36U>#`D
zdcIE`5uCPh49bXuZFbZP#eG`@3$jCI5X`F~T1RYz*;cUal4@Q~Bb-D)QkjEY=n}em
zj}<&<K{9bUbmgkh`H=KDZy>aFsnsx7+vsq*;7?66r`b21$Ei2vK%9a~ZN6Q9_V(!s
z`LA#8JUE|NBt6VasE+yFSoz)B{CD@hdV9D`?k)}RX6u&UF@6+?Na{^mx{qBbMg~MG
z{zC~?4jVlH^$Zy;3Y8xlJ=#^irvp6E`x7{_$B3AaUefW<*iZiM+quHGbCqvrceQV4
zOYEO-uNT<gj;+{B`<uOQHohtTWlH<Y>|YkXDg5ONPr!u7GIliXHuE@#h<U(ku>(2*
zH~O3XZyx_;_M69S%x^lX9`+bWfhw^_^cj0<51^>+tEU<yqp*j00s>2H6TB=IJ9g8@
zx_QW+@5fLt_XaARvA^DM%}r6SW`|sV8CBpB246T#SDmE5xneJ^xRFi`m>@?H&AhCE
z+SRp=zzlm8!2~lSiKNEnp%;kr@y8!)DbEQR9{I`N{c6}S<0FxX#y)gJcyye+-km~t
zf*lz0ejW@qb-vDhUHjA2pT@pE{L}c?l|N0ge<#1*`%_l?`shy+?BmIwCcdtG{p{=F
z*H8X*{p;r)yNmzZum1igKA%O1amX9`apxOqq`~emdQ9(&H!^hbqeimFOf#F%?8qSR
zWjFPIZL$aU@&77*Q-*&%AE90of8Efa9vxzfmM1`lw<6<tjeSx76HMS|5Fy#22ndBz
zDqgy(Y-;@b=D!#IeG5+K|MCN)0Yb8)IhBMukC6=ag!R)eH(;hl6Vbl<TC~33@o@I&
z{Q1*AKl<}m+Ml2Q`O*J3_U9V>^+NIR7N@;$u^HjNoaYZ`MiY2w5B_p#Q@g4UW;#|0
zGy&u_dh4}o_5HXfa_H|YuF>kCon<Rrh1NrVmL31?MDE*(Ti;H69{P4-4L>~)o7jgt
z-az9D1~AIRjL%~1W7ZZRdB{F{p@|pwj<k!(mG|YzPDe(EtU0Cgb=B;MY{&GV^EU=Y
z@fZBv#$`?`o!@N&=7gBV4uv$T4$)ixa<>FVm;Xkd`rkUw9sVA9T>;SHjHo`M$GxUa
zya%UE`kI$L6gS%3&Rvg_iAR0Dw&DrDr;@@4PVX_bl+G@2u;XhJ<0!k28@n@&ogLEt
zn!!YIA7E?+QldfAYX<>XuF>nM%gdhFVR&Lc{<t&h>i-4E!w-HBPbT~>8yIcC$sY1M
z*;DXf9gZ~)zi1phZ5-Ze9Bi|{TH|n{akz{-7PZDfk^KYv50=^Aj@=FTHI`CvYX_r7
zI?jIhG{84;bD(2`Ke7h>_3dQOh>`NT6RD)~W1mX?cMt`Q!)o5+=K_-d{nvWh|KUUA
zt#}_vagrGfam<q7*fIt_Dx#W6&c6%;WZ&)xARB;f5<l+vxTL`!c=W3J>Io*OXCW;0
zKldR*fOoWG(|$H=HP@&<Zd5lKM-z?ecH`)Jqq^LvK5SH<G^*o`Y8gHsjo|?^Y*)wr
z*zfqW7pvOUOgt0Lq%xkcyV~r}ByHZ-->{$OZ^6&I(b;4iGW|Y}JO=W*I`j6~lef=5
zf3vc1eq-(YZpZFLbmO4>-GlMh$4{Y=M+^4Y|2`Hq28<{(VVNxyYBq=LQIDTRqlw{^
zmhLyS)JQ6A#I!W<L(`g`(sVGS_+-9<ViU<sT>Gt&jN+Hyj->k&aXdyJ9E_S_OqNoX
zq3G-(Yxq5j5)d<Vh>K8wvs$i51>h19SLl>284G^m_K_DYkt@=xfE~*c#r~3`;}Z97
zE!bNXBd<bp^^^iH$5+Ge){SscOpsCq?m7slNWj|iyKz(A2j|#RHkq&1dvehJp14XV
zbaPvLh6Mi8-5H2M{2sKvU(SL+#9)HCVD;T09rWE_EYx??`ilkanm&+#XEQNWf6kv_
z4N?IjgNbA!Lo{&q&<>zVI9_>%J@9nl8O4N&e@F^ha1b?!X;*P2VM|67U(GyPp^av=
z-b}pD1Y8C6AYjxGeZp}=aPZFt@w5=9j%#`*HJa(IANMDf>jSgiV<6V8N41m*Cj=z1
zbOw20u>{@|;gCboU|hjKV}dHN!d9FQBmCqPb1z_YgDs;OTvzJ_5R?0<-e=<IF=XYZ
zc=Uh`bp`bxerkMD-wbDfX$3hM_Ept7A9kSf33UuAXdvhS-^WmPoNGaVKzYZ)zg3if
zs0smxJ)3TWRS%}n*ji#8v#rx8=pfV^uGoYk=&7^5@@LO>x^y}vo=d{D(My;|VhL0%
ziyV|nkW`Vpoo!1QA9Gq;AJa++VcS@{Eh%$Ob%?>w+mq!o3v?(~jHn84Zwxn05CknX
zAF@J3f^G}{%<qZqe5SD9aT}6dMDgb(4RTwo8k{PaktH30j}Cu!M(EjL&jiHw>EYqC
zS43Mh1L_$6F~fG-CM{~8Kz0l0S7k8ch-R?*1V$}Ee4?BWbOZz=Vr)rE>&X;;#wW|N
zTR>yNNEv+@Giku7@Y93*Y?GcTy0r$LnquM$<hX#K#Pr$3gR}8@%^9Q_FQ62}`kT$w
zcN5Ry^Vm3E|5<iY3`;G(LV3fjv+QF`jX6A<*g%bmFV6Q?->uDKBFqBh$Yf{F#;4E5
zx4YAru*Bqr_B#Z1iY_t@K7v5lPmR<WJQ8KDIHf6Qf;TBih?y>i#*~^YWztNEq?`(#
zze}3VvQTg=HYMoH$V-yC_j^J{qkBb(i@6mLiI`fsxc@}ExOYTBulH+^h;#Si%g;aL
z?!4cmC;I#SV{wfT9)qNwwUzG=tM9)oT-=}3F6QzdZeNGv7uk;c+J~jti{(w#Jf1Hw
zC^fEij$1t@)%(Vi6H=3$kqI_eXv1vd$s<U>u~L9+ooxwCha&1tF14oV*bl1MhPDgy
zOSOI<$`IljPFke&?LD$Xr|GyOd42AE2atu>vZ*oDPz`ge-Ah6Q^Vq%2Ph*1<v7}er
z*7F5RViOXQa+1Cv3n<gVj>E!B$Weru-aj<tLwqF_<cp{zwf|kJH)}XZuIZ3K>Jj-C
z{RS@o(4^Af2-tDC&t3LGv&8;k>x^cTmH_d8DIC#J5`rWk#!-qQPqG#3c(6?yVi%So
zsXsx#iuINMHn!6gq7l2RcqL2i3QjMJbS`V40Qpx{sTR>FFe9Q+fE21tQipJjS!$xZ
zShi7El4=oqR;}kNHZcjhTI(ynT4&#%VM<q-(^PDgrI$rw*mYvA@GP%ARGL_qBX%P@
z@q1p;z|^_vkr*~LNCbPC)#Ii?j!jC2VYNh3AJv9O1`_d+VLX7lHP=ksJ*-`Oad&it
zZRn?KH~BRyCx{7uf|)s5!g49D>D(lWSuO?n4_C-60)rm*>fA6I!ep|bmJpYat-U&Y
zRc3#moi4troMxR&@QQXic{=gx`KzZ$%Bf-Q&KMEOfkCdlDnkxX>2!v*r989>B$YJ>
z%SgrIRrz%8)d`FRsRO({9*e1IRkj^8{6%saUKnepd}R<5{dN5jct7;RgZsoRPBsq`
zG0i}$u17?pK#5|Yevrf%RS)xK`;vMLep?tFb}A1~fz}*Wo+fdL6HzRBlhhWPg&R6Y
zWRaRg`!JIYg(PJ1+7xEGp^yYuvyf?Pmb`<AT)B!*h$q*1CR;evrcJ1KRmeu{bG8(m
zr=?&qgAMLV4wbJH3sR@z#8&xwPh`$H0@7AeTuElumDwR0m7BmWTtp+pU22k!5ckJ9
z$`mO**s}KWjvPCo5mEU6sk@fuwv8?QS5UL(RAoDhTjj(_=BDDgnK*Z@CYuyQLKI9A
zPykRewbH|S*pgp~CBJ0LjAT7*Nv7VkY)cD{x3)Ajim7-*RxCSHmTUgP^f?WHAO&=z
zA>~e{T%!2^8jVJyanAY9`94a8frP|$Bol4}3yxC8aZyNk?qJZ+UPo<=<(*zCrwxT+
zm*@?Q%rX+Dk*)y|1Z?el^HMGd$mrVwF%`V!Y28`v_N_|~d}@<Uzy};u(zNi6WXHMp
z`?`NyDDD^f-z`6R_hkIdv#~eD4RpEO!1tSHTko#T{Lj<j{l{y+ZCxR*4n6Ar55wN*
z^8Us(vKdD;gOM5tJ(*dAZmcf;_R~X}DM<SBu4m)j+S0p+H{G~8>B^hp)c*LoUsve1
zZwJoXoq77lk??4>thR)p(skU(wOM)$ON0b>#lwL(cQ!0BgS%@+Pkh%BPpYE5h5bxZ
zm<A%Q7*<BtLTiZE<u_)OOq`evO31#}_Km(HIZHWfwSJdUjONHYXOwgr>>KgjHB&eb
z6m~O|c0CSLG0M&+F^~+^${l6j@8X7V$B}O1lh6Mgb-&nYAZiyM=QbMZjyy-q@3Ve)
zPn2BSv|<G%+mfH8Pc^6xJ3@-9Ervr2N!>C~C=oqcHfhv$LjX$An#Qb98*6bXw`svD
z9m`zUq)xJJj6d!GZ=BL0`089M;!$wmX8<M0J^(n_ojOV?qODj%zP*ekoMVUvFb9d1
zb!bb|xC(<@g?HH*0CM6XPX}VixDO7MGjbw{ccZ`d&e<9?Ju?`gEt1z`$)zK|UBW=;
zm|d#K7n^a?>x$Rj-@7dxT)%cO%}li8X0p{P|N0wbEH717?%D-^c8o17O9vw|=6BTr
z6k%TY!zo|22+|UM3lXA64WX#0>0nFFx8p9==2&HBf|n$AN{P41n%bFhre<lmgP$^n
zL55SMJXzm0SxU%K=#(&@M6V|h71&{t>3DOHNFOu0#a15o&qV6?2)5HAu71jiNPfQL
zc%oyrOC2Fgspb?e;Sb~V2bb$_jMiasQ-SUg4`^^_bo;S~s}HI1dKw39bbBd88qeRn
zjtuop_MeWt{qa(`Y#!oCxNEo1e%urt6yp3sZkL@YCNBTtlS%_ZF5X5P<<~Kz#!0U!
zBAW(fewe0G7rr_e)6vC6{^3vl=^PFN*8&{bPW>4;-1B<X^*Wu8u$@1;GRptc{x>9<
zU=qV@CJQP5shp)lwuxigprJ!LpFN(e5f}!?76KX$oa@Ju{gG}~jHK+^KPDdPIPyA5
z4=iF;Q1STEJrwqcdJHhSa3c8HTW+>PozA<z5XbRlYeW%^->S((+o=%uhv*Csvi$6n
zi^rJ8P?O+@l0$KfIS)==L)7cZ$Xr0x4Vg@wsOz|24g$92XR!X@QLR7Dd;)6RLJ|Pg
zb#Sr&CA4o{Ekz%=e!#=K;iEATl&Nr?yn2!RRacUNbR_R-Fn`R*{@p9@3)iU4J^X%Z
z3jljp+Wx4R{TQOvtK>DLU0$d4z|x?<Woso+83$i<-IE9q7GUplXgf%kr+#OQou)JS
ztRA*2;Ru_0kaGW3<nIi9*WK5W3lld=)*l0&eXKC^7$Ur@6vOrSLa_Zf6&QGoIs~?&
zfsHa#7<vo+rCWawP&jML4`?dlz8+tT417+U0VDw61$4I-Wk3ytGh)wxaF{K^F{=(=
zIHmls@zzGSPV^z<2d%aP$4Pr>(nwIg4>Y^%ll&)HM52v4hGPgXu98>Jlm}{$ZX{|1
zg?p(1UkwX4O2>pZjv=WAaV0nFT}zu@!}cz5zit-^d|X)^@>1akGS~lKAcKJIEt2oo
zx3|d57@(9M6d?p?Z;{NU9>59@dAOo8hz06_w*A-!UF{lNVv6t`6|_6-?fzkqH%rgN
zWpRRbx?4<6WK<36bL6(RkO0QvMgn&ifdtNe^id-e&?Nm$)e?G#?U?b%H=NC5pb}{+
z=@1MYO;UC6Ms;wtdSRh@ewBP!f)D2l)xkSFCXT)Sa<M!OOdS1tgXB$jxga(1po%8y
zCA&(VdE#OF*A#_d<#;_@UM@dCE|Ay5q&tuH2H8CTehz^#MIMg4SKk3|cgaTW*FobL
z(tD)s`#dxy;Jkez6Cv=EV`>a9hnY1kxgFvMow~s!b3l#`)%D6+fFaQWJx}J4$4#Lj
z2g(k*mGoJQf=5u<2LR%Ze<RIj71MpNvUh+<5VP+TevgpF<KBzKm)$QGUld>Vk^djR
z*myZCz3hE?5ji~8Up#%WD7}~tLEa%S63F0T54W=$kHy1n-Yh*OdvXqtg&oq0+Ui<b
zJT4FKEw@Q-xViG;-T?88%G0lh*@F(W9!)VdmC9<ms#)q8V)dL+V4cd8m;^k?@!^Dd
z!i)nleF%(O5{CifFI$cpYQFMt$R}dM<^IUOy87xS`FkL}y7ub&tFc!jukMk*2kfzd
zfK3x6M%_XXCQrjKjUgN!Er`g?+?qW!8KtBlx2UPCDH)2EFgw{r(LfN=B<*hO=6APt
zi$C}57Rb9b@(;K|9!mIeh5S9*&66*fcnuPV<icMuBoDzW9By-D5V2>3rH0!FSFL0^
zn@q|n#b{AWYyksHY|cT{NMI>QX`ao4f-*d;vPn7^8+?EH+`+Aa<i=EZtRS@og`C}g
z{K#G*kQC&o6B+nHnk1SLw{wm>dk3~R8f2Du2y1Pq{6i@{Jk-?h9DHITJdzNBISND}
zO|f=(jJZ!S9$!ck9of6su{)ly%940@HSe%t2iP+b*hGZ-3#207n5{~il2D)LbtxxH
zUn(t%sSt-)0bXS03aQhJGQWsnmkEry<SX;!x*Ur8@NniPH%3yZ%;YPxLsCN=hD$$^
z3w>4F098P$zpOfo=?hs?(lW<hvI8LktApV#Z`m{&W@JRcFk>Hw5~0@FwqZ`mq__AQ
zr6RdjS?@!3lC>4&B)L#oD^%9zDR~KxmZWaJm1dde2;?MxNhumIUYYi7#pNc!?O?rE
zrIQAR`P-Q(J;+gVV|!z#yKcFuGxxWd6RbvA(&ThA#Ix8XE)m*^tj?d6)-(`>3}^RR
zboSdEtP_rA;t&l1QQ}rd`tcD{Ps5_Ck4;_DgL&FR?&oO?@yw7PxSS?+^N+0ZVP+s@
zG?}Or3&>(}E8I@FfP4j-X5_}OX9hQ6LHzar{*t}Hz5ZVYiA%gBxh^vE!nCVG_q!bj
zYLebPxW0dRf`)-Yq&zf9zfFyjeP}?3$6wMU{TU`BNI3kv(n0!GKV06xPoo`qI@<>2
zz=GKj;Ufc*BOOw36i8--lNxF^f}h*$9APT*5o|yG_dE!A)?Xo+gpBZi!(|X8kZACT
zV}AucII=E3wK}s}QV{t+t-}PcWr58G!&nsUH&j!B+M{@@Y$TN<oTh-MNAdS*N_rrO
zz57{@job!B<hw68{Pr_~Xt{ld>5g`VnTtHc=(;$~9pO^dxis~N_yS_#cMuLA6vMWg
z(A@zzxd*Z23k*#95voAIyB;iAUlIhG>l~!%u7lfG1l1dwT#beq2RY7HR)+%J5XmXK
zp0TlAfNduP(c0P?6Hpp`gsCQAcGEo21t%5sKKv{Sh;mJH`gsAN%=-YBiei|5X*Qqt
zWRnhpnT;~h9kRfzXNf!P`wQceE94&lL~BqYR*yFgMd>Ekho`O5nhh<s95NLJ(ZvAE
zu@OwHm0=13hJN;>KZLjd!qM>66mt4UVB{a&UI8IERB&z_ZI2Zyj?LBEWN)uMdOO@5
z1$lFCYHF)NLzh?nFo0V!eO9Dq&9i4lG38joP~op#30pAaX`n5g%jj9FHAd%g%xWc*
zS-2ImO$BY^1c-cz1BYjIBN;pU(MOR_!fk+7QpcLe=je?@yAtm1XQ5v02)nDA8es_A
zWy=ySRf9vRa+(aAl@*qDuG=}5%&MXdSStCxU5mRC31Yi@W=(r6rR17**$^%~t80YZ
z2xiE+AJBr{MjYZ+L-)Idn8<Zpwv;5fk~Q$!OO)1#VxOq{E!gb@2>TmdX_HM8gh{#R
z_27-_@V)B!uIk{e>d-@}dU2#WxKJIwSRH&=9lTpT&t%g_Er|!^_43T#g}t8gbol|d
z$}X6ql8aSS68ycs@?v@E^>BH5j~T8<8HsBkBLPWB<mptPGBKzeG0H`(#}89N@QeNU
z%!;83`;OQQ8Rip|J49s*aswEB!sQEO&XstrQFfDLVfrMO>ByO?oRFE!wtzW*MF}r5
ze6@%ZT8E+~h!?Fe+32ZU#LIq!nxDSdda?Xs{v|VpW`oLM?=Qvlj+w&Vmbisq%PHu$
z2?er@Ewj$>cB;C}WQ;}#d4}8v{SXvG<alqO%*4$BH$Ksp)mzUf>5R<87$8^>b|?TO
zc7~#~%-I%eFj)kMTwM6(tJzm0ukMonnOQY7b3c$)kca<<yK?~fJ&{hyWH>uZc~fUz
zF_eK=__=F0zdJ!*LnuI;@RASmP+Zz$lNpIc`1YinB#&>&*`6Z^Pv-e&x?!knnF!16
zEK(8j@Y*1bKql^rG7xuB@ZnCi_Xm7)rP_O&3PXfaf$*c;<IBh&+_`#iYqpNuj)(Pr
zsVbI!CY!ODd@^zv1&BzK6*VBPY;U1%`9>Jsa+;veC%oe!yHPuAhMTJZ<A!@KP{D^t
zodK-2W+t(pDS*?4><}QdC&^7_06xnDV-FLu@o?85Ml&AVYA<T+HLcBZmh;kdX_dY1
z*zudNn*+K^w|zdLGA}+2<%ro{uG+(u%GxYwJgiMu*3VVe7Axx)c=#!fOer5%*`Q`R
z?1KR?({G!}%AMh(qz$Tu;V@BqA~FZj#X>`E-0p7213o#lTw$uBgLtbpsW2oL?7T`A
zx-MH=wD6zH+nZdJ^Ko~jIA1B=trYKrRs%>hoU0TspqRr=9^CoYTr!)MGjdvH-uHeY
zl{|!#SZ|wq=YE+bp3Dl{lL2V6`ea5+N{Q-|`&u#tuIZ(k3V~{R$)I?c=80xGCxawI
zAe8y3mWV-s>_~D7#0j!7%*mLqFalNuQ<j!4TF4#fSw^VV_957}4+uzOSIjvStz@=4
z>efT^9k^&U*ng;AVP*C6O|@NyMEO){)+OQ>te+;+<zq$5TAhj!^SF6}aOsH#;`^+b
zl?_#g6l+<85WCR>0$l<_f&igv%1ApS+9LIR`jB5{5;4++7y_uYW|??IYj(*qxI>e)
z>Y3##3D+_Ad5}{m5j>Al;M7c_g@d*dc%UsMh^^K+F!X9b)RMdx9lS;lgIozkvlQBn
zh)^?iKalRG_x!KDQ|NxI;cJjjB33|r$-!4I#*l@)$1Nic;QR&$M5u^&b?o4$zJs1Q
z;*=cRy&@fqu6(#Q{C;Zg;Az*vot1;W8-iG?qfk#hff1)TcGH8v*6G>vT?gSR*~=8*
zkKsh4gh_=^EYiZYt*-R^NH`{B@k-P_#9SvSwQ#I5^9U<ZZ&PMtf5TlEe042(DUp_?
z?DMBNE2Vd6#D-n&kW%{db8)>R7jG4ZV!fuky?mDPQP$5O3tpn!yF0^>18A4`twLp}
zC?1>6TC&z&n$!}qVY}H;=yAs}SMOG3VJlX%YdzBJEIU`Yj<XLDSt_3XDCU5)GY-q^
zP^EAkq1a7I{ThPc^%zCekw~bYT#O}@M&I7R98Cms?Ly1}D7B0rlm2Az-NQ%3vV1$Z
zepK?)cdI>4TWcY>OA%(N17q<UHSs4UBgah}hjAbl&(BFs&nPikh#0tyA-A=vtX5Q8
z2sBm?>2No1HuMe?VhllBrV_l)!4?@M0h+==o9HbqfZdu|)l!1DvXpqMM*5zPbr>q~
zJ(CTa+8=AsmKV_SLARP^Q?ViEMsO=!W#-pPO3`GtCq^+X@%l?XZY5e>3Gh>XXNcTG
z7r|HMgGbTI7$a3Ip2iX<9yD~CU(W_Uz-fNwd95Ik2lIp|>7#g!Q0*TNdI_;vedQhx
zZTTWfWgZV}3DHaHY6z9w9hQyaK_vXy1MtY#A?x3Ey9A2d+@duu*b!o|JOyWb+@h9?
z$;T@4)!rNU`V&GFx2wHZ_z*?3g0#>@Xs5%}o;Bp_^TC8@rD5WgReG2kzOg=vAAc9E
zdx})$6e5luViq1QHR6WEh3kpZVeYP4RS-X1T$k9$4-T(V`xtFrny|>E1ZXT`;RSQ~
zy*^y9!i(aDu^%9Q`_I)U1CrHReR4Ha{5EtwO1L@{rwS!7WcvxQ3EQlkC`6|z+A&da
zPE&$N5H=lkMMEja<7Q=Q%8pdp#V9=x*F0Ws%@L%ZEVvZe3zezsc93+s>-ecKkMD3`
zTYX!~r_yplE6o$<-o7k3m&8)AI^|okvmvJyyXeDhP&m~3iMuC;3Y>xmm_%Z6R&2<W
zHDIIi5>YA=I19od))CavEclnA6fvMWjWsLc5mfhqHH{Cf8J~h|^3@G|HzF3@bVpMK
zGDSFXG5{{YqHu(B8RD7V(h>3v#5`%~$VGfFSU`e&-xrbOZ~>_ZM>!QFH-Ik@G!^YQ
zr(hAuhmqBTp}AUty>#$w^26o1_veN`Tqp|1I7cv8S#Svk1M5g`R9423jPS@&%@GWm
z7Xr&k?0HhTFG5L5O_qhqd{1R=)D@BQsUbP<Q14CIB5ubK++3*4b|WR;s82!3V}~!V
z2$4<EnC9otdrHe48d*qE&wI#Xr$T))tt}CcV+O2Eak};ihB5E%bR*WhwEb*n2&nBg
z5P04~DY8|d-`*K&C^Hd^W`1sDwM4o!nM}%B!g&POR%UVfy7M$biLYHWZ0S>B((mD?
zbKX4aadf$&Mma|?l38#m&wcAo7JE>iYUC)POz(9W^|>l_*F-#S`SxCs8es39oO|<R
zElRkOG_k+cORcm0zTV(gf&-P!ay*r7i^bC_7FYy@>EMA8{3{r9vK33K&4z5K3L|b9
z*NOj>R$@*pGI+~`(wt2u4+jLjn?RKS)E5MAU?_2@W8j`&t864XWJ8H%bPaTxna5SN
z7FAQ5$-NZ34Uk^O+S0Psq8pju&C*aPYsV`Y!<JT(ay%EyYU<hGjWROnm?1-8IUZL`
zMssA~J9HxzYf;nA!MP*k>|^PAn$dKdV!#>8TAVRoP!))*Ni=~ea4T{ZEN;JyY;%s^
zmKldu)D|_)*v8gJ;+>+Ikqy!>EDAYNtU4iEGTc~)$}mpWe&7X+^JxE%<Leyy2Q(d}
z=IQWb!iU>n<`uafir{|WYT%ib@}0~uB)Ab|Lq#LESc;R0Pul#L8nY6VEU`1?v)FDn
z24l;~e>Xhs>Y7%XWnT>H8sr_`dEx`i;rlzhoO87M^EM?j7;kg<o;i?U&pT6u8&m8Z
zz;uJHUwd!j8|HkVcKDQs2E2&*2Dd~Kn%`e69Nby?aIWxv=^kVZTwungsrj78gaFr2
z-bB%zen*hH*g)XT5B=qFEbB(pd1dbHM(pm}GRk1vPTRuDP&5suE<b>mM^fl<e4Rra
zo_RsRKK)qk%#kUq<>IX&VtF5v1s0WYPs$1Qq(>9;>ULp!omi2;kxR@MVrT9!gGpdC
zIx{SU%*~`tS`230BpP~?sGxe&vHrrPZRQc09>>=?WDN$x4(QyuX`5)U;aO75xq#^a
z%yV?iu#v*siwpZ3KZb~d{kA;tcJ$`{#>2o6*Xe)#QgT;s!N?Zu8)iX|5>9g3M!h!g
zf-M{@a#wJPv?&-HB74L<3%g)I2-aiMUFZc5?=0x1cSRR$0~UE4$bf}jFy4br`K?r9
zksW8c!O4h)ai9Z-pART_A+BwKXTy7R2!?2&O~0p!SZL-Fjl-ZU#E2v8(1YJEx$&BO
zWpyC@14uAd6L_WRt6+ErTXUaL^W$~TDAAY;Hs<%d5)QC<9$<p^mG3i6Y&r)%<)l+N
zvCde|poDv>35GAQW!{5MG*EH)xhHr5>_;JV&V3w4iAO*K?lV$26hYi7RZcXs99-Oh
z>k_u<rj;}l^ZRs|ZTy=P_T>~aW$A5)T{-uAh9}no3VBA#Oesc&Urax=f-La!)$?oB
zi=$zR=O@a`<@NHsRG!{Dx7WAVTYgYx8nKRRNi77xl}6BPU>4rnxk#YJ@nmOsr=P8-
z=y;xDhp2g;V*8m;BAw6_sZDP;p65A&v5=PZDV?{vp>iwMzcS>w-VW$ln?YvP^P5Vd
zG|x*^`idAqIU~WVUI$M&#0;|2l|pT5+9wC+#`l*`$*r=QlvRyiFTU4To_{@DUbkz-
zUk^*=#l6A3KJwkt>tS9p(>J-&T3cx?mncngYsWv%8j74f%S&Ynd756|oq~BIs025J
zvTmW>cZ$)CptKT{KJXHp1STY8I-SaDDOol<Eji6E1plv<-TS-Gem=K5yIcIZmr{K*
zBMR`)b0$1{v>-dmo@d<js87w5eF3=P41SAFG=o^EJ&!<`4W!zb#jI|g6s_Z)sP@j_
z`#TaAcwfO9Z<;!FN1)8x79mDUw;yXS`h{X67bZV+&wc2<%1fp?DI3l0s$ufVyTJtb
zv5|zk3$<=G$#VP*EQ3viFQAC~S-A4;l?gO_srfq|rF`4}u5e2$xS3cf;B0Ev4JM_9
zQwyz_N{V?4(ZEg~MURUZ+Zn0Mjnf(pNhq>2JBXm<h+{7^Z*(w>pWia{gx;Z=DVY-Y
z$2-}#yKN7LS<TVzy=PzR0djd)B}Am427;|;tcE)Pq-*_)oKZW^usM#ucbh9cWNz4Q
zUs~RkbxVOMq_}`=b+nv9AfN;?RTLc@aRjJ{&Hca;u41==g}q&s;<ZZgL8aJTc|2Sx
zUaveJz*67ayhN~5vMGbMM_MuX{KfZd{i%VSQAd9Y#m%r<;ygb??3cm43+3fsW~IHy
zQ1V-5a$U5mp%z>xK$ai=?>j$*h>p<>-~CY7|EZUkD|R9yCwzL~br+o3Dal$wvRYM5
zqK3M7!kCY>C{?EVv8uP5W>b-0)**FjYQp`zgkiGFPK9d}HD><<|0p4a@D^#g{$SHI
z%`4jRf-U3U>5ze@uOv&BqQxPb3@?}}7$tNse3o4JZ^(s@eIk6}8?0O@XHOo*l+tRu
zDr>q(YiM(7<XNnrl@!aFhf=yUn^X+*AGQCb?%JB#NV4=_$(WaYG5fGTU?zIDr`;R7
zGu_edo{gBd0?JTENKC1O7oHFoBXenYyDe;EY|L0#0tAQ>0%3!Q7@1cLGtpspKgmI3
zdJK;J3!CSh%2ElWtV+3eBQ_jS=#rHxXJuu6=k|SJy&FG(lrn^-sySu#!6(Cd-m3I)
z3F~(AqI4G;+xY3K;7FcrChM&JM$IazJCt2Ck11#wCaQRC^k9u*GXJb18*&w6GTn1a
z^bSKQ3&jH&c|V@TK{b)>0-ijIXRtOmj~^}^4W_Vix0!S{8BSysdkYnK@6wxjWKPbI
z{4YaE-bs}9O`sFCRu@>=kLpF8@ABL1>`s=wkFTFT+y;;8RPp_og3s9v*cF9@?Ia*X
zNPnZ!-9?YHG)_rx^IooHCQh;uUuG_7^ukGXg?m*q<a{6~uz%zH2GQK}^P6ghXPYkt
zu7fo>(7c7hwR}u~czKsYPExw%ZES>~w*-6htP4t;$fK)+QW=yEXHt~~zL)?dGjTU(
zNO0E1HvE)9-d@e^TNm9jgF(VYTOVx!Z7N%UC>8E*HQWTdFHNmg3w1`ki^f$W43?^r
zhqw_^X52>l4Zhg{^XZ`Q8_4$pbIHlSQ!O7B2$x0LI++y<hc#z2Y$DmN4hCi@0lS8d
z=ru<X7E6k6S5|EN)W$Pbv8s512%)jQ*ee<fSejbosV2Gkq&$D5O?1#XQH%^4uqmfL
z8NW<oOS`g`z$)fh@mARzUJIqsAGvdQ<8bQmE`vAhuS?=aA5Q)CIo@af<sG=s2P&bM
z8hCtsd*bBY)8m!DyiIJLEUh+`%;$m6KPN`rT`k;Dz_qa}-NDu^0g~vhMmTcut5kNe
zdgGM1o|x6rty`UucAv6g-@cq=H%X?Osdq;#w<pJeFZo&2YyCj?ey<*i^mJpO;Jk<5
z^OfAm|5@0&Zgh3&AwyoDgKqw@`8VT*Hxr3BHw(TF{>9guKY#@&t<fQ4=%zsX836wQ
z2yuTX0eKYqa(`U)4FEhw7TjHQN8R90;Ui!Tl%Cb$tjn7opOxP^q8?|-v0I*ghn9>>
zJOq_78MJpuv1J86l~#Rv>u!F@ZZY)G-y6B+7jfWtRuA_^La`prZ!Ur9^~ds#U#EST
z9MBr->o7E_uLVeQziRI_y8I?4CUHcrT97KM!(?vj3dQ87AXo0U`#byDmjf?{UykgT
z_BYx7b0p2(dO5Ve4H~WcyDz1#7&pEV=urdXkB_EinvqaQ>Usw5+>cD8&h-m~T-Hpj
zM-Tg#Z`ZQszRtf+2MK?ChP1hSIf``YorcvXy&kYfKQ1reVD#vI!vUcXF}ZrNwRB9#
zA|>S=;)jwtjFB}r_3Hks#aGE!a^hRGHVY2d4q$9~$7YZx2SOvC=+{DewBOqMmLI#y
z8~x=C{|HIRrOKWAzVv;O{{?B(b-yUd$3~QfUGp7Ch?IP;hdcD{R{ma-6X8O*<Z~^o
zb+pF1<Z|G^nOlBjmh&-rk5G5sYlJ{B$S<t&kE!vK`_igiFFo$P3k<pUL?q?gfJNwF
z>?YCvEpn^ZFKF^EH*=cvtvwgysV~0Uo73dasl$jHcq(>xi9PDT=KGU0MkLH*NFFMn
zjq6&){J4UjyP`voZ{#0I34Wd6lB4Ms3(F@Q@ZWxAIZHmD(q@-cF7`>!(3P$QxNn2v
z9f<cRUJO>@@6BD;t-ZNOj~)sT{K-0HX<$@u_(ytB-w2H3Qh@ATXYxg^YhgLbYmenw
zj1Av4q#6nedIqrIeysClGXyf4zOkcPzNYPiBEGjNk?25*JVp8c<`~T7jpx9Q3rC^?
zSPP&l>W^U{{N%q0ARlfgU+hALoK?UK{v21ya0C3lpu!E%v=^tk-sX$7+)BF~8u(<u
zyY+M{L5Xme(a0`T)`o-_Q<{GwJ@o<wF8`auNL2jqQm{*03GqtNM#h9kNPl~~QoN6!
zjAPtvwo*(&Bn=pI)6i^7ij5$Wc9}0Ad$X2d%Gql!@{bX*7^VVB8E26xcM&sH*8KEE
zAb9q9XL-(ImTA0sJ|tab1ySzy!0|0<yk8J4Aln#j$5LJtq+1AEF{?AyRl6G0M$lZi
z%&?{jLuqc6o^l-Ryy-}+3t2avoahMRm8Ub6bX@ShR`b?tVr~>&8+F)b;nPuvWmi4S
zR>vfBnG$BiH3K1p;H?FtZd`mfz~9o9=PFMhVT&0eRd51lwU_!dM}5O@HWi`!e{r={
z3$r}8cOQ9o!{xcCWk#;)Ok-VPqchSQ?o;L9buGqd%a=GIA;j0xYP5!oh}Eg9*93g3
zn?OCzXShb0{jS-^`k2$N-lnMr`#wXdE{OGsYb$5|9CL~XY78%B`iL6C>O5maO~j94
zdkL&CKw4gi5%S|eW$`=ZAYTn`!z1x<t?&;i)Hj2l=BjZ&H_n%J=mj8{l0y<Dm6+eZ
zh559JA#S1ZtF@q*?vAnIjRFhjKloy%^TiCJfP6v$w?))7dQ>8TWC9%x4I_cY(ouZu
zXfSaU&pR=|p(%JVw5S>bbWI(wbGX67D)vnvx`2;z7$0m95TwvwRxo1Zu%G6le`G#^
z#>nZd;dh1X+5G`Vp}*7O#OZAI^l4T-`sZifkZr>R`EL>#eV^3kkvYLZyHZGCxdqYw
zu3@aO8VOY9#h|EP0VnMiPUA(G=krd)nu!M>jZD9zP89IMY{#h%k3|!)zZz4+CCf-+
z5#f`XC?IiYJjUbQCTlp(*(yk#0l~eeH8DSm@r^=(M<$0g9y#O*z5!7`)nbo7XlDP5
z`|bUP#te6z?J}4{m1{(;K8OQ0Q3O(5v-vZ96_<B+b-!Q+DFy%TaTo|}vjZF!Zo9-9
zkR7?kO`SCi_mz;%;aQcq7J+1~Ynp$zTWd8bMhC0-pKRT}4h$2vxZ~IiA4eT4fhalL
zFLvGVUc>NV2KhN`Y<RACQ!O4?*YM!S0p#S!RdP>=yrYEXNDbp~oj@T)1+&;m4!J)D
zei?RR3<w^q9ZtdR=f`@tzl66RVv5h2i4PL?Ki-@_9@&H_A#xKpSzkkj0rmBBV)S@)
zhg%EHl-g+R?FKZ|LM<5ClF@K@T<#5vX&j!5x}B+p+B%Jn&K9j#GeTOc5i+d4^8vR{
zc?0%QO@qdkk}LC(LfB1!JLT9M_}V3kvjt|B%G?PJ#)}=2AO^y7wGLfD#$T*YgztUP
zNLP7|wMU{>tS8KmevzF|`6|R0q^sraj#ka=(&egbD2EZUGyrrxk+1>xqFo+m0%-lI
z7PEBkIaYbWKO8ZJx)#$M{BmG?6SLuL0gTfmXW<`uxLHX}<K{l?s#DlvzfY4og`Y=V
zY3>28R64<xkxOy7hak{i76fRi06eYl@{ifO+yE#y3`;}7gyOb1@Z$Ju1d$wN`r81+
z;6)yfZau8jVMMU&14oyD(+F0w?vj22<v%%;V-7Z29(CcA4Im1>OJg;xw0D9@H<3P!
zUmt(v2qml`9!jI4g1iSlWd8||l!i~#i~IbM+I#R~G=TTE&Qs#=n7w^Cei+BPjpD%;
zI~;QSvH)8qV0S7x%=x?Eq{BQHxwaR_9iNsrIv6)>LmlN*xFf`8rY?QL30sU{-RrLF
zbtF`=TTRBV5_`>xq@xrE6@k2yDM7}X#{E9Ygo?pa!~<EUP&NSh!)<U+;&|oG4bR~e
zzLvTfoz40mOZSd%A^-n?bbS^3aiiwmy%<*j?|#OjV4j*Pt^?QqHjQilbe-)0H;)n%
z4ma{I%{MIqRsF3x`w{x8WEU_flFVx~qQu2LZSvwU<2=&N$4+CtxVtTQb~n{D^RFNo
zFKU&yt^QcJgYjL@WEXcpMSs+l2;5nu4bm<?^@?7o&rKt$Gd;}OI)Nq(It^*8v(5&p
z?1c)jpYktm;2ThgQua3m*bT@KUqM5A64T5xir$|q^dLo8k8ZNiKj|U8SF0U#tNd8e
z59E2Ij|K~Z(KOK{D+YLV@(Ge=DqsS*jh>t=9_jTgZT4CGdJw-;kwI}bes|wzXX$~n
z2j8f=bX33qsx*m=ZcuXqN6iyuN6i+xTvkaD%;T?_M=uQcD@mAXmLETCvJkkq3HnR`
z7^WLy2nHG3*)0GYK&$~DHQ|nt;f&SF!gwWfmjDN`oUVW?26D31m6|?ljqVM`Iy812
z)<Z+T<^?Csq+RnzN-SXYV2iRu6W@{7`9COV+rYgUD%%bdq}RlO&4KOW9B!B&l&VN)
z2V{ftXp-YwfpV0cBU_X(J%U^w33P8PB2cjfjF}|pLX@Rd+7IO{=&i~c|E1j5==Mk;
z18Ej<jg3c+V+Jto83;`05cw>+07uo8F%4tt4RF%P#$o*UfI8|+n~dVNG{A>n*M~-5
zD|<Dn%jBKaw-fPZoEaA^BL;olT32>vbe<O>QuR2Pdep!a;+Ckzu|<b|-sA9vXuz(B
z27VrgqJTKN!m4*aL1E0NNr99|WRxlp{Zdq+=bTgF2Yq6QwYG-O1z>^@9yC1N-?jsJ
zBL4S&0wCOH?U7KJL$~s?P-L%U?9Ev(q{XxV*4#VLa-~N%+B)T|bmW@o*4p$i1qUMx
z?r{`F9sL{>zl-+p6Oq>0inMj{yUon<g7fqKo{AjC_a5TW#^Z=vok!t54!rIPp-m|p
zBO$4rC}J8A`{AlLma$CQh7`Q>b6+S;WPn3TClKISspQ9y2+VeB+5pp}oY)qE8>(&N
zkY}TkAFbF^PAJUZzi~Ra{?5+5Pi7G2jHTY+cnrdnX(_h_!5a=<2C!sp0CCN3ZRwfA
z*gjZUUlxI?0QxdHyc-WaNM*zmVj{+Vq-e+cB~4&-mD=N#>>~suGnM6mN_H8E*i!^c
z2GI>d)nc2KtaPm)Ad-VRu!RR0=OI<dTU6u4fyXk&h=}=%zmWF^Y$_qmiR!Q(vtMuP
zj9l--bhqHFl^>biT0cCK_(*;d(*LWx+$+y^MHKOnZ)fX=-*N;22}uQZID#Jh9aMjW
z`gm!c8liw3GL=sY1n|V!_0gX(jfGV!2nQC09HZmz_-B7VM$RdIs_yiwZVeL?0*p59
z_tFc+eVzWVQwLi>GY$nzBQ6Vo^kg^Ou*EHx<K=tDgVMZOj|XEX!Ou9E9zGtn`FTG-
zd3dt3($Jgnub~)UMoqn)IeX<LW`x^9v37&o&{n6;E`M_6Ul<TeJ2dl(UJg7RDlnWZ
z7Q3$A3++H#Sk(@xMvL_~(`H;B7cQWst`tl43cPFqqKWX4$hJvk*GG98_RDa2hqcj7
zxWsy2OzKwn#RC?N#9AX}q_@1#8F7_c@Lni-Mcn(_a;18IyZDDjFWp-t@HCX~t#?a?
z6rv6@)}ysYoY`ZvM_5&j1idF_Tc^<*iK_k|Q0h+X+u%A8L)C|pL#6@M<~1TPS2>(8
zbW{14V7sTuvbcm!l2N|l*HMLW^LdK+ri3X#smfBEBLQz6zk5awTa=*EUo<MZ)%_w5
zXhzv1;}$o&Vlp`_c_u`G?wA@Zafu$~N@_`DM%Z=bXp)peK{*xksvaDgWa2g%{rwu>
z8E{TS`QH$(B#va1;{5^mg8ze~q2$rv9A@z>)o9kl7mwl#M+0$D>c4d~xO+S?bripQ
z6raUriKC$`vZ5B7^#9NhLblHwf}n<aHbS|ZANdWpZ$qXJ<N}R3dfx!0awv;aZ~#^Q
z#w8C?fYaNos|9`@TPb>!@WS6uOq@O3b`^5Y9<H;~?^0(|>+I}N;&ki|k_gOZ;oIR{
z4In#vG;%tgI2}(Z3+c=tIQzP?zJ%b+A#de_$UgwgR;6rhD6w<XB@C!$CjmnN(z(`(
zjk`ny=eo&>^XKY=!)C;K&Od$lVx~S;tn=_S)cGO5SV`pgZHnHwy`VDu1VLRcf=$6f
zkgE?q#T90kFfuoaC!N;`!V}^Q<9gx?2;xm5vW<)WQcNUAsb#4W$W00d8%YJ_kkSCK
zuH#`l4|K(qXO;x(fn7&!2>o)9C6!Wk%6(oB9<%GL3lOxawRK+NZ>+v<p@j!(>!7Tz
zxWLr~)gY}$YiU&Mtzo{oMm}I6MF>94k#>&6Zk)P+dN3TasT6y+#4?RR#PKQsM@gX-
z%uZTE)V@M~JdgH}$9?BUCHbdC7u%y>FDKr{{h_{^zfW)$R0W7gM18If@E5Z#x8%46
z8|fDz4KVl!krcr(C>Hrq-9uP=%g$R9m)OB3_yv(`Y#Us&!~AY;@w2c1s7Bt1zg4`i
z!cq9(#z`qf_Q-#4PaKbI{8U^%o)~#Mo}~eKJ2m>#^9@o$YQ~LteCsd#H@vnB{EYVp
z7Vo^BOOQyCvTw1DYw&|yjZxj~<&WkCX>#$g#I1PsSHJ$P-p;SZuOW()=iVfi-`vc+
zNvs4R2wwRi@jCr_HK0OyNrab|o!3FtW`IuMbsm@mpw8w8slNx=1ZI&<;Q8SYnEu=-
zes(w|J*NXHh{yPhw(;nrW8;+jmqbK*nO+C-k*c1DvFm6kwpTJNJ#<Y<35dGrd`fV$
zNPe$T86D@2G_MGl04`tGjc{LkJD>GZXR9W9^p)tLW>P;a)cz;;OD{)W4(xBfjPDm-
z4)edm`^zt-?lqyl?*CNi(dtOa6?&l5pE@+8IciYr|0msSHLQN18~0^B{&9gnIcq<z
zzP8~n5Fbm=fa)<V)T@V#Zh7(z8nSS`G5H9fW_zF!aAn(bXiXOIg`D0PYWo*nWnL}5
zy8r5-zpJ_^P;DWAz<c1Q0ZRP;-H7%_LLKF~P&w~g+yDI}@Ew0&!ecP*V;DfoZvask
z-gnZIFg(m4en+GYv5@~-IUm;K?W{nL0tvGgFkRsrev~@X1vBy;tlUqE7lApD7rB{=
zre*?7&>J4MyMRo{XTvk6_OPrK3d++HEA_=|YID!gp9f52<1qjqa?xWE7V7cR`GpFK
zPE88P_Db4f{eDR6)50yzUuhA1>#u(O<uw2V(zTij_ucVX_8FgWDA06+n*AU@4u$%^
z)wEXWJtW(+h>UqzZ}KNM8s!<5XaN?{uq*JRUl4=;$FBQqmOWO`1y!U>1Zr$X0&T2l
zY<Xs<KZ|o^bEvX84aN7Hw^(IkmH(-1JgaO><8iIBIVSC)D-eQ!I{Yh^sqd9QI9J;8
z&%9-4dC6?;)1z|EXjA1utk&ZPSQFfW{2*<eWyX}R!W2a!^RXbP^DdUjNIF21v50an
z1<H2y*EPdh{CRF*uY(Q7ZL$18>2LYJ@uT!Ctiw-Y`F*;wHB>1sRkm(ciqlw@zwVak
zOZx!)5evuKVj=m!#FW-kEVUOVfHYMA3hn)OTfxFNGkH8VcYHrc%<HG^XVBcIEs#`m
zgGKMsLmaF%jEDV^gTzQI${cxA1_qwd)<kiA7Gq4yMAA%E_Q*Cyif!pNkJ&}3BTz7p
zpQ;zk%YTbl&#Kl<5^K?^hMazvYW|zbEB?R#X|R#L*jw#~l#^(Ad2b$~WDK!qYoB6t
z?2mj?glJJa-}l%bbgQ3lCS72YM=h1qIIvo9;zXVKVo+(cskf^|x;{2wn|3E1_EBDl
z>Y9qCxwkHzj%h|gD-1^!gu#%cK-8!RPc>VFPRBHPt_MZoxy?K%iTHO_a^eI?Hu?=H
z0ylL_XT5xg6$3`csp+@rdk`5KnnXhA#L>_yMu1iYw*IbpjXbRJwlN9>HQqCr6a$Gc
zMGV#yybRHb@tU){+0)r9I~`8FA1j_cn0O!0GJx1y!)IgaL6{N9jRB;arflB~22a-9
z7zt|W6+ECg>2g7{^Jv-0yZI3Pyo)m@BNTHB0@fy3jSb)=6YqM3Uz;rBUM8Yuj8p(!
za|U7RY-M>()Fe$3WOeatGdaTr3}H;QxUU%G14H`G8pb3Fh;frt+&zl#OX8?rzx_oy
zzqifTuo$Kf8!T$*d=a|dM8P&KL7>;?!eCg2zR;n@Nh!oY#7xgo!)E7j9R1ZCP=Np0
z7H`QFxTZ|VOjoJ1a5-f`&2`keR!+gH-4q=&Q7jh_3$9UIfijAp5HMu5K(Sd;<$_Z|
z4aYRb6Clrsn2`%2){vwUX%|;EMVWj!#YcYUaEd`;JjMgi-|89hrb?f`89qr*p6q;k
z@_fbxXifR}c}JO*lMVal%~j}VywTFz*pc6=wROdMTA~i1Y*_AWn0lKPG5dU5LOjjC
zk>&rBMmuO^SbZ(sK>vuOGQQ_@YevYokpoG6+x3`6fS;t;VT;z@ZG>BpP3l8KA=F{@
ze|;e+Lea|Kdm1#x>uzwOPNG)Uq3{K`d@c(+!(Rl75!C6{1*RAhfebMJ-m1ZaU!+6y
z|M!^Xxn9l9`to(#)3^s4R!rN1TW(<F+9>D4m>ErpTD~N;m-Q$<(d{ue<tmVex+a0j
z&6=J@*lBZY>hFq9A+#ipqntN&B8y)D_luuX*u8j>$|&__004E)fV46<na$s2Tg^y1
zS(!WeMxF)junN`7B4U_qO?g6-_}x~pv}!lOhrL@LKsKNN{?PiVH!CQeoIx?=nYxrf
z{=U-sSuO<WRuS<PMGaU93k#)pl91f_#1vxQ!Gx3SG{U$%v<Ol}em+rT5rJ72lT6Ya
zv-!Co{{+-IBklc|8$uIHm$FtmK<lz@f|7<tc^-UHUh$t`h^w9u9s7aJu@#kF6>4Mq
zr2}c$79C`Got+xg%m(7EObXG81uCo}?Ft*|X(;y^(m8_o%Pu5HxS16>frPdyIjJwJ
zQNdKrAV3wAub+wX6#So4!RLrs2X&fzq05DRj$H-PBiOsu-AGOCfDCe!;r+bBG3W{C
zH|OS2+2ANu+WjtxA3Y{gLts8s7bUAok@c7n7R3!R=5CORKA%tAo5A{HCZoH>q0)6N
zWW~ZAif*6ZYi1icdo0kjxrR@&)_8|iOnv?xs+4&J$vSFxG1!7tI&HWm7x2w0I)mto
zdEA~C`Sy${(N)gE6|WPi8HKU{n>nURR%r@Xyq*C?H<Ek9!I9KaXG>QV8N0B<Lq;+s
z77snH`j?vZDJh`=q##^!Xa&LPv?8R%3ZiN9!^FnUqY3u@&hY8u#Mz@^a!aZDq8eh2
zB=QC(5pm@3i&gq!TBQ0;iiBT+9~mfBcgoglvVi}my{~I(BTLi#D;XWJ(K8VudU|&!
zVt1o=Bf6)0dMb8%wj*kKDq?Ro5|IYkLZbCy245jzfic*)sw}WEwoD;F2oSQcK-hpT
zDBsS)MNduam3FbPUB<D0VbA-%=VYEtQu!syUEMQLQCP}6dGeghRG#<ad7oz<;lU_l
zW<;_wxUsvim48#l={Y7XGT%`w&$bVOc?q`*fvgHfxOjLgKt$`Jp|p0#Y%ApngiCCM
zl^#9F<vX19St^mfmP#i^lpBV1rRvs||EQMq(q}Ku&Zd{I0GAg-%y&4-YG{@4OO;2c
zkPQ&SQ3f8if<Y+*X4_4GinI6Bg^9LoZ=4bA71{eBOr^tIO;N01v8yM*%ZQlO10wEw
z;cDmkk7UVa-`V2$*^`|hVJz7ibnK;b7^^>_4h=-YYSbYRqg)WmvI(`ggn2qP^&913
z=13~7Mk6=W6e17JA)ddt>~)Cm5Jh|TpSM8fiv90G(@xf~4p4mrWrEE5U7e5^vKR2H
zYt=o9VCDXKgM@sP|Gmb>;S62v=X_M7$g@yIX3<>~#A-Mb(Nnl-Q_6tO<G>5TUX};G
z<EGvvjPc9r53-i}4GTw{6~O_+Dq5#*pkeD4$&xyE2A@h@Ctdwf%-f3A5iQ|GuPRTX
zRR4Iu#i>Tw=dFL-{a<7Mxa$G8s-MmEsn#r)lo|hS7*=EAZ0770;LK+bkq(GQVkt)?
z3^)+p$F8T1$*`a2{s5FL<|Oj+$LIp=xCQM1X?p+LufBcq?L+q8edW)-&9i??-#%df
z-urfeeV+Yx1@`>8d+cwewLI1;?g%R@PZpRP9@YD!4S-a=_0gi+qm7W%yT4!mr_tXx
zQDkcI_Zz=o|NYAE7yl{4{#|s7Lwdg@08lkrCmnA#pie(LI*9eDiR(veE*PJP^x)mN
zq22(G$Stt>`eo&8Y`Z<}h}b6}YF<c4ef@vNmQYk`9=FW;6O|(OWnNTC5`{XY)_17B
z(k&7SBwAyc4-fCV;ZpUB@lD3<X)(m6$c&$FEoYD^LT4ObKvRFmC6GzpP#ql@J-t4U
z-U|J#3o7*nhF^DeohkQ_t2{XT(F7c#>2>Dzv;ut9`)iy~teb9EvEedAK)1?2C=a?(
z;|Kvb#QgrK&f@MKC?1E>Lu6#fYQ(%ZnvKtm*O2pbH6;Hg)uYv;B`-=+K8^sys?pp7
zY&DR{Ln>0a)=s6hvnjNs2)_EVwX^O;MM{1ZJ0nh`eqQs4Pwh0P2hIwm23pn6`8a6R
z$067K{DjD1WbWTMfLFo&D+yxPY7liP-FutoejxYZZndfl=;!T{y0a_5)g(3r;8yVC
zYVwtL*PZhI-a%w5^KYV2^;IGD&9j7a4u#ldPiBq5AX=h|n$nJsHg_K1KAb<BD!43!
z+(mr*@X^^<8`icaz^Q4Tfw^S8dPdz7%KG#`x+RGX;`LlAF|Fjp_(hkRy_96y#84ZY
ziT1kH!|;<)uqUCtZn8;(fK9!64c*buda}AMn}}9=2)M26Gh?8e&N2h7|FnPygScDX
zSz#}8(pld?(;(*TC9%)?uz4?oeAeEIByNq02(vFkAl`qZ-QLo?L~>clChdY#_UNf%
zF+aPflQo+OhJoe_uXxdnVsG6YKXlz*O{=acX1&^6q@p8qcYseKfLPA#@pI5iVpnsY
zU+fNM(47B}x^5K(XOPv6rGUh-=Gb&km@|BK3XNNu4~z@};yD?208LsB5jr`Z54a4R
z<*?S<BML;U@%PWQUczmMor@RIX5>cVhN=3<J(~TMV2|CfFXr(S!tSOMu+G>-<+2~Q
zcrk0<^9hypxP@;8>>Ugvc9>gyRa|Yiw!%&VGibwFc;g-dtrLm5O-+qkm`@bZ3_Fhx
z%oKQf<f<6=Af02yiz`g=(VkJ#;w4fBD1Z&GB`NMfvJJeFHTOCb*O1cYGpZD!)nD*o
zK|a0#Du6(Ywb<I&Cm|g7KvP|miEP+chq#5De#?tMKL7l44-q7MK#o=c#H6}C#XKxj
zKYrFD7jL+QG+3{_dVz*}M5B(&ZdvAXH5FcXiDW=!#fIQyl1eGuBLkM%IhVik#SeZt
zR1x6?M7UFXv;L6k9y*2vphr-59)^F1*y<4G@d2vj_ZuIO-u1d>v$SnSs1w&XWDj=C
zlPB#$5ELMKV0!t3#<6Kbj$<1M&uAT#;oe)>dV)n2egI>UNPiIDh_@X6;tHn%o1FjH
z1FU$c#ta{RK7^e>vt$pQ%5U_+C<YtBA%)d2IKoMNne-EbfQMU4(X*URus(h)J>p52
zdVmOoSXm9hWn&x3Ggk$~F)!%ac`bs8Kqo*<LafbPXxZ41!^*~t1RUE!R>(QULU7_U
zJ^>ey-UN!)C*BVjnech>^krY@BP(PCe0@4LcRE%+%|1gg<L>F$>gm|FOh*$K-e}ki
zCV^yQb53xl)?2kL#S1r$Uqs`hz3;a76|_HQpMSVN4IHQYqvwyGpXVMZL}U{n@*&KV
zkYX`b#szI@jY}bOM4~kc;=)Q3AR;23!@B9Iyd$2+8W#~a$3-lr5Pc0&Z!4Kax<InD
ztt_yKh1TX2vy&{=Q}v?h6s!!_u%!&~WYgw*T+|0>MwzX((<+q|nS{nae34jbzyMPs
z;thTk4Kn}H7JOM+s&Q5Rw!BVO+pwP(uwlaiyC;_9>fAI6DiS{HL11G8QFuE-+Q(=7
z1#G?^)InyW>*YGqN#bSk9f&bmg!WR$rY2=%b6FF7poc^k$_+>YS6(nKZRlTu04NQ+
z)2e!G+L^<!PDTm<71<VI5UrXkf&q!?H`YV<7v+%UJ4|^v{SNEW$^OGV@Km<HoJM1X
zG?2Th2qv2t^pwdso|t@dzXbj6&*=0nzT-jBxZ2m#tFjY?9qVNWhC1&Q(H5r;M4PDQ
z4_&EIaR;C>jqnbz+NV=mPb}W2^++wOp`O2@cFNzzoyA9md=Uv3tSspjLZu(KRvg_G
zFxI`Eh1=6!n<F~#=wK+0dIv|FO8SPgYzS!vg$K0QwOBl*xq|S#4g{=GuWiZD%tQSP
zn2Tj8)m0#Zm8n+Q@rnxKekgJ&i!22fm?@!Q=nNiXmLD4=@aaR;k-h)Jj(`s^?z~Qp
zY_mKjj>^0M1<bAjC6$;#HU+fx9w1lSH_x+G<$bR4{wqg`95S2oSW&7IyKlAXS)+IZ
zttVz5O%ZRqdq^h_S)JuO($&p)zPduPhgVYM<uZyAS}2Z(3+{3#g(zdC#BI{$DWO21
z^Ra*v`sU#c?NwHZ_p+t^h8By&hh1px&^qvXe__@4+E!0~WN@L>124A}sXRf)meloC
z*gY<jM;)EANOj1XIzmIwH9+&^_}R$>YT&&*xr<c+FHfco^}#VD!G7rl$5B~)Q<-~v
zJNw2t)G4HOGE=Pio2}7zFQ0S_+k6^JrT~b7s~r20LIN3ntDvSsZ+6-Rp&e!ug|%cd
z;tL8uuXEFjTo33_h(edlS*>!ZHwa1O@#~TzJ~c7UUTdwgo4SHhaE91TO-G9ai>c&A
zG^8Q|MRzdB&xWxgxWTH5Wfn6ypsbk1N>!2^Zek2(pT}14*abm1w$*9;J6x*RZzzUt
zAxOqSFlGoucozG<c?7G}b$!YD=8+ORT1>Dnv7-ZK<AV0xfX@((=~;nN)KAih@W7R$
z{Q*FWDA(E6UuX%o=*8GqEr!vYzt@=g+^a<bRH}-3VMXeYnoyO5Mo2*MWYVq?{$O&Y
zJ)Z@urb!LO*{rlHSF~^{twBa5sf5*NI2}zw_@^%oHM5ZvmZkU(_{_D+ODJqyCDCIe
z>{Axe9X-s`#Hi091%kL7k0R823PR1tT=VKX3`FE-b66W#GJ-)O@&oISNGX8Vouw!c
zSKA>^d`9O`vz8J1j1P&sKp`QnrkvEmY1+`IMfA6iLSNy&#DN#lPvq!r=@xq>o@Pqz
zi4`b=&3^fIar!hfj&eJh(@X_LdbWl1&OQkC1PvS6nI=#Uo5ctc*eDfL(Yb?vGnkTs
zoef?CD|B<r+B_NCvf3sdmLXy{)IQ9MUP$$@sfVdDMvX4)E8|AL;yl7;s?I3qwFmE4
zHr_AqE9VP0Kjfb)=QoSzdk;S3*8=MA<Wb;8u7b^*Y%Cs>q7vGyA`G#rnGdeWp*c`<
z`w%}rltg*t-Yb?`+zCCxb)o3DW(k4$4sccQSG52?xn(HC9mgMEQ?4k_iw}k>Rs*d<
z_KVCadNGNpEjbjzS5K+O3F+5ZXf00)IqRJukfA-15<Ij7#lYCqndi3PFNjs=h2~C0
zm)+^mj&B^5WOtCZjw*vm%_l>Ib_7Z~#2uwz%yx6F(h``pD8^%i)beQbR`Kt+g7GwF
zTa+fyBBrY0Lsa(RAvWJpHL-wQRTbs~4Ub%l$54?+Rc@$#>Z_Y3)0wTw5!AIz4XX*N
z9wyx${{c9DUIaMVFPyYX6Yo@Y(C5NQ)x=oRT1D7&jo>-YnQ1$NlG?{jEQ&p#U>x5l
z%wR)&yP>dV&oHL*43)ND1kl$uRUl(rjQ-Y~!f*2&D#a;bbnifpr`^(dW1jJ?TepSy
zPaRQIVmoxPrWZfciRv^0-!ZwlR&@!Bm}?Y<vxepJypS1ou!dRMbQ?G6WPISZ@QP?u
z_%5>^{m100C!;5`aEni}<|5{Xaj)5pe1XN!qS_hnsC1`KrYWw+?=<^Mb<)pCy6Gh6
zKbUtmxylx9@`u_px7)>=Y&8c3(aPl8rCgwR-u=?+!|`{cHP$aVPVFzJ-#*Ms{XV9!
zVW-|B$)Tt^@;~E=J}vQIe)<=zjcJQp?5|f%TD5(oz2SJmVQ(@FmT%-rB<vi{V;kZ)
zX4?_;X}#(Gek|c|XD4*Ss`aO}WU5E)2Nsyltp?OYAA8+tJ%jNW$X_^~JQBMSiA8#u
zC+bc)NaToAO71pP?(ss<wd+FmpZ&yqfq(WBOU%?hhRUXv0BN=~^E(oP)zK-K`l;yv
z#hs>{-0*SXgce&XV;0UOd*p;Ca9l^~Bzv%dTcA~+958Bt8=G{U9=WH|VLcwifh^!T
zya}4PsE#E$k~WUg_ha+jbC1YQ2t9VeQ3NiWuCOY*g7fUNHRJCduN_B5qC+$kH!)sY
zWR~IAxG4j0b8c07)GIOMU$|qY&o||Eorrx1T`FK4S`&`d8Y-S<PREu!3&sd(_bd`v
zZ{mj{;rOPxyTn<DR-97st{A_^4WJ8pysx<H!;rrs`wQ#B6*&~XLqTlG^U4C@qFhX$
zSH0o@$;KBfJ>&v#dtde;=E<M8Nszlom36(%c|D|TN_D#jmcQo2(4~2@yZuU}C=vPR
z>?N`+|CT@1f6H!<s8#QT*`&;5xd|pISUbq%@&XX&Zc->@i%trjo4f<zjcp>=TR=J|
zbKU5conq1TaE(dJ6J{1(hY5Rv61EF3-Yky3tsI`s9{5qgfa8jH&z|g@txnl1V#r1o
ze*r;+I?mogP}<tBYhIXNz`-RjI*MJmtecV+h6FT3I39(Op{ofD`xfj2+IE=5K%s1L
z#+9m2v}jV0T2i=>W78KzE^L-?ec!GjfB}K=rcGq~^00+^G&_ESQ1bvG)8#YFZwH@&
zY<_|p=|SZeS}GA?JwxzX=xLbx=HeQDN(@bg1^VA6kqwKT!F4zMO3ZWX10&W~RA!g6
z-$pD9En_@#ZHRJMER?_!9rltO0n8bxlmO%3xS24I6_=p5WC;a~c+YbxJOLjwDS&KX
ztgWPn-;m5@)Wv)P@O_8Scwoe&WN}64R+6xY+4urh$f$~HBmc=KDWr|Qh@+tf!Es4|
zB>QE1D*yUm;WU#!%{)5IPF*I}5^F)30k^71HlNkfvn%50K2Xk%a<U(N2FlHHC??wy
zt4J_zKL7muLFPmL0SMZY)frs9-k(rPH?}K;!s{O5>qca~lfCONhHkIC(ydKw)Xyl=
z&)*<@B{h5Z_`8j@?K|u)>$}L}E{-O-eeI55+vI_HgKhI-q+JHG`j4^OE5%%N0xr}B
zDY&zSUnZ$&m<Qs!#<N1yz3)aueU-&<I!Vn~EC*e9=4ybl;)`oMl_kk*ytKq?WV%8f
zf*SQ3Z8ur$da;kBPy^(hj?POEp&65;Oc3A32&7MNM~4T}Go3={_MVaND7uqYjk^<7
zBeW}#i7uj3RmQ(rEPT!&7ra&Z20SmnF?N&Z$wTV41frt4t4h&#Lna4r{mGo88yA3Z
z^nU5}i@RrE-C;{S3Fut(^WMzg><&YVF?W6r)Yg%GLaH}o=WxT*e)Pz@uc*3L-Ji=0
zzIzuD>Wd{mp3sojoWNz}3j*jGdC4(cRj;caD(R!ZR>Knp4%79B`Sv8ZGzfz6eZ7Bx
z^vF;=nd(ny$=^l0m3i{d7|c!^!8!CK*(1T4ch8^@FpYzkRi36(3g@p`V}w=y1`hC?
z!dVyWQ8jxcr7mk)=pvzGI*9>vtN@GE%8EcaE(J^Pd=^jl#sQ^xpr}eJlDq*|F{sXl
zAHC&xK2cLzgz7{;Nu<Nq{HA$E6g3eZh+Ib!^~m55JUcF&x29Z=rd)}D{%V2%fj_1$
zk`XUVwd)wjm}r%2CiLGUS+&Avrf0oX+!t&?H3P!;2r+nU^R0k+p1z2ZOsRuIssa;^
zx>Bt|&>*{r0fhC_Oo`WGLNMW$zXVSnF#DL-v4Z0&++io-h}u#&NuCeW)91_!2lyH|
z<SSL?qkGQxtfS0Bjhxqzj6ZA2GE-gTch==wYCyy~-Bs^dops^y)l6Wn3ObD|!bw~r
zV3yB(&pLW=x0v^40$G(g!U`}|g&=7ls6{;59#$esA9L9p#pUesK=k_rUOY|tdE$3z
z_S5&34LZ5WEbculEUY11smV4^pc1-cazr4Lwx%r3kv$qIhknl;)JbTfUsdZ=@aHX0
zw2q8SHT%N83VJXr0!OseRr-i~Egabc7%p~|c8v>14hu+oKIcx!n1`&=+3dvYmpkp5
z+h>zsweM8@B$m#~_s<q@wZEKh-<o5Gy62W)B@QXLZijG1=5~arScjyO22_YcKt<?3
ztu&)A$%2hSU#)c_$f_a|D;up<_QR2u;E?_D9x|=}STCnnQV8bEOG_)6r?66$iXLPx
z%M0ls^DJ!a2gA6~wYsK6@@nC-@=3bE5i;xz6-yvq9maIq0~1bHnCmnMn3JJ+`qg11
zb+qX-w?r07h?^&9seDF{Q;*1D9_=TT-@@}0(*_2EXO^I>nv9u{WXgbv6CG5;T8!62
zP;^Se2EOIIYz&`}$?QQhNP}NY_!|6VsDdJvN@%`+EMz6kh-^{;lR#xDr^HOB$JT_9
zVN``owp7SuW08F4oak$>>rB!N#)b<R3oc7RXJl<G?%O4UOtxCcx}I~(Z4>)l6zH9$
zi}qW991>b{_y~qBSaz6TGG1<ec|!LyXUm)IdtaPpzJLUgYzB^IOlF|q>kyeGR3FPW
zE+uPp7n4N=k?JW6bul6v)wRS-86IwFhHOmBkd67dgPEpTUX_6&W|x&C`mRwupUa)k
zuPJ1gh76-Lq=1sy_*3BfSRL&;Ids=BPz}pen6I4a)WqP?DU;*T1`d%CoNYbd#GOl*
zmIQMA{qdTmyeOk6QpR?@!>=!7va<EG(Ug!tTezgO&IVfFOPH8h5Va6ZO77$^>%*+@
zT{O9$V$%yr8pD1M*_-VllU=W8jHobUSEqaJD$_IBDy_6iTUaDKr{{2Z&c^TpLN>;~
z;TCM-wnc1hP{npjU&4Kb5V;Dxo+3ODALeBjR8XPxMTIgOI2LThAH@2L;0YkkaYF8e
zSd6*Tp(X~^8?YF6wH9CWOLAB=VboVRIVyT)p@}(z=fswBG6VYpD83c34C1*cb-06D
zGWhwHR7t_}IaVJi<mB#jtksyneNCeyt}1saxuUz6*C|kz2zBAgLM)DGZO18FWK(Hj
z_*o$v$0}swe33d`jS^GcrjH8fcJ1?Gi$ko&*fTMxc=OLfl6S_dHFjC+I@W(|8>Jmx
z)ND{jiCGM<8}pDkLI;mo3FP2G702d-cDB@2Mv3eZb|&%xS?}Bje7OuKxLIT8@Rv^d
zw;#2T!&9ooA&wp-GvyZl|3f6OK60k$ax*%A=o%CssgZzr4ufOJDZ%jzH5myjg5lXq
zZlls#oz@vEO?;ohD^rXp#WM0cP}e#N)*zJjoyYheM~l~@Y99m(i2KrO662SV*cCNA
zz#K1jaoi6vIsHIi)qAv<bYaqweRnv7nJ4sKa!XQFTI<YH-fopQ@k3EDb3psbPE=aU
z(>iCLV&;T410y{pC2$@Q*LhGr;-R2{96QZ!DKb%X;7SvN@G)@Wfw_9`w;PHelJ=5r
z$YIU9E<nAE_`H&pu`y_>p@QW>+nsKX_tXM&My3&2Mh;B}f@u$Vgi32IgFv#c2^D7L
zKJ=bI_)WYonNo+DZ4bXo$R+sTmo^C1m0!63+{irV)zHteY&OdbrYU6j5&X=$N#>q6
z$e~RvTe}4UQBh6ttu7J+!m<U_(=uA8V0T*C{`{o_zF_m-)vZsOvDXP3RU~H%8*dK2
zc(+*hSw#ZJAqk(aDiVb2#rh!{mSW2kg#G(QAZ8j?A{3iVDd9N#<csP>kk@e)rzQpV
zjWGQ@E^j3u`&k{O^!mhw>;Ny-&_$tYHtB&&i|p%Rcbzx7pf8d}aDEZ@P~Aw0*M^8z
z6Blv=#soHa-3UK)L!7UuqZZhLpFt5YnAHj%XsS^89%@SB$!jAiU`fK-QoR4yj@Aa^
ziYguf`Wo*|ssm{yrYfKEC&Ew3gsIRZfy_ve6tmo-Q9+i3r%nP#Af^fE-1J?XFcKS3
zqnA>BlHgBsq%Teda{L+Zi^Pr=*_7kTh{BBSqeUX=3z8&=U%8S{V<|Qney+6fQ=-ZJ
zF0CPtHcXs-_MIG%Igd)M_0d*&%Sxbq3sk$fjnbt36XuixuVTUGyvHJvB9@4l$tX`D
z>94X)DGD}bhh#6ICWVNTmTzI8lAB6A9WW=h?I8ZVrUTt6^2wp_#M_Nnb;GQpev7T;
z%=c88(g%oY=3!BZN>tqPRGg|*jK@pGr%U6%G(R*WV9LFF3>Z*7ebAoD<JYoaYHFfU
zCu&);+n8<`$G@yu<z4R1>*k_C^mG~N2Aw|%ic4&A1$mK4XktC`q1esZ<%x$0EJ|!)
z*pzvm!{AzIoW?R|sc88UYgXnyej&^G)BO3<3_@n!&pd>WP|b3)`u^swa{jXPA$RY*
z@f53Bo-Ta1UB9Gi77PE<q4ZhiCC5a=B4#j3K<H%NY1Fxx6Cc)68P;x6PjaBuE&4#3
zsXX?d$5B){_b2X{$9}d19cV_JtAzYhI#tzW*TLwCiX6TU_>03xWKa@u3*Qns6pw4l
zr7|vWkWH+EgMya|_Or(`DexMyCRQ#ZIU+GV0loUDDUp%0-9Ldjk2_E)14#~EHKX8B
zP0XYe@G~7+#B2$trD~9OA{eK524zLhy10;1b(cVCpc)=fPnl98Im;g6p(>!eV^r4&
zO+ENB2m998r6{e?KSp`YC0Eg;`{K#eHxGwm5zM2&n|vgEBc@#iSQeQ~_$`&tUOfs2
z<c&8QEx4Q<y&!8vUruxj1y}%?<?{8QzZK<TwZA1Za>z-oYJ#N-t1n;^1kO(;*g+ZO
zJK8=zR4B93K<=aci+djcZxWvC0E%>ah$jiFb>Od`9|^gI#4c46J|9o8xbj=UzKY<$
z%}^<hsj7z<t+vtW)YCUbk=fyX=W~M_>w7QW(Mxlf`&qTgq0AxsJa=4kn7t{A8lMGp
zZEG@RJ<(yU;*JtAX3;?_AFZb65Itfj;sJH(nyI=6R1A-s?D@%_%pTX2lSverJTyOd
zm~|l-op|C-X7%kxVn%@k&aC;#JDCkYu{Cq=_2DCCeVvVFAa`=R#6G-!@#WjhedeXN
z$8+t<-X#?PVWH2v#|vk<DM0N_<=F@3(Z8F2#H)gGt8_AdshfZIQByY^dH27MoXiiM
zs_PF_B7@a0uJir=ieR)vv<Ib4I<|09ZZK+S+=nxg8i>c7&*eW`<Rl9bC{JW9w_y`>
zN*Gy`<^P5?ch4F5(2?KMqg|eINB;EN_Yk=1p?{Q;&}RyK+?xtqnMlfAQNk+F-s5(S
zAQ%K~Y*pBu(?@@E^xUl8PEzY$G((-V)h8_4>RYYyf{3g*27LYLsaFWP1%w{3+p%W|
zYS2ZBTo6uo%QP8E>0^%w+UTBJvIwKwtD=u%h@c5^2H3IXJ;$U=qJMbM0Qlf-$3lg!
zWYG6e>cZ@$dy&2HCw;z!H?rb>BVB~f_fQmr5O$6~Q5RA33c6Vft<nbWb{rD;QNM~O
zhvI!|;)Yqr+(Rnr*GG%LOGjczAYf50cTGnvlP}{SQ0_@m!(C6@>>E^?Uer!KYl!EB
zrN4?hw%BKP!3}0<6Djcl$w?W!wN4V^UKBWlX87yI%d>?S9`aKp5f07Q3LVo@zmCKr
zgKAVUua~$g1mQ5t>J=DBSXNO2u9CqLO5)Ts4ATJQ#nedgcvw$;r+e^<9xX;u1f%0(
z>nD}N?~#^JY!_jl37QUTvNCwI38(Q{rO{!jRt9OzL3qAs;m*Qm(5$E^U9IcBvmzx)
zY^sET#tH>DHwCwk9*11AS?zshG&(K*`~v#SSez}4wyQIznaV}mpc^p>BD2~>!=a#b
zYeo1C|JN7J$EOt=+ah>ye@2St*L5`qDm7r6p5VSG$|V~ozBL>280zNMhtiep5+Ewh
zL(Ln=`P4+5%^iwSdoyza#HSx{9;dab9{r2r!yNJ*XXdZg*oe|^Zz=hYC0~5MlH>us
zUwuc67ck`300V-=y9nCz0<qUk3>Cisiu|02A3IoG0ptw~aD9mfWjQ1W5cEnwGtGBU
zHjhTxD27wZLLt<~kgMx;$2P{B-&pDb#xiG(E000~js~7CLeas_3lIy%H@e063li|8
zj@nPA-pt=5%aC`sR`3+k{P!O<r0JC{rSl=Fcsyzi>|M4dL6C5s|JayiVjW_e0pn!c
zPP4|tgj>MHvD{-}_O`!OY|c~g%ifCj)0Nc>j|iDMw;-3Wv=v#J%`M1f5{u(jv|EJB
zB>l$kfd`1Ry>)DrV{;E>+1t4Ga%yhjE3<wY&BZNrMK7aw5AWIPo_XSGvD&0g5@W8+
z`($F0e3*O0%Cq&@S<eg1!)>ofX{#A%Bedm-R==Y?XVtoL4^SDlSrMx-p$^m7X4SEB
zi&6<IOx1s>R~;Xr3u04sEvfvPetAjP=+p&^imlEw%E_2jQ_2&^ii>$<Bg*7S`PFLe
z*sChJhNJ9E7_-gdg%5=+2h=z6!hs?T4N@-Z7<r-^ohxZ#4rl9I$`6;iwQ5J<^zj^*
zyd@SQB)kW_a}o9iiYj$1<k@Jg-ol^}@zYCKX1_5bsh)>jkZWOnLHxM?uzdY0k&g96
zwSFze{Exq89f>P@RP-AqVn_2)^r?Nk?8k3x=&uDUJPu(^R08>q3E36~0m60_qdr@{
zLp;{%AV~p{*e?u*W#VA2;4Z+N`3&!{%gAL!BYx)&>m)BRK-4KS)ZdOB1P<faR25wW
zi|(|xN3l^cfG{T(7@a@OoIh?T@a6V$@BmH6(j!T6vpyziZ8k-$W=BM0me{f!L?x3K
zC0QN?;_%+ZN5SWFps&AdbzSB9lNc>=RH%d%x)@A!(p<-c@VvaBJ6HfgNeH6UVCgRT
z>|QQyQt4H76<!jt_^U_XO!$wLu%8D7NB9fIXKchM{=pUeCUdwCj$dG`gH!=&&BMUR
z$+Yk2N5@hqS-`O($OHnR%{rDut>c1EdW)EnLSz~4E(C~WNO>v1aqY8@I~W_P@v9R6
zrbREO_HBGE1&By^Io=d-_)EVL2<I3>xAe+hkWd|(1a#K>6Y9_a`<X;Nlt7Eh)ktbU
z`PFYfb@Sj^6NPHM8feuNJJx!6@ZqsDE=R=t_F_P?R+>RlVbi_>g;58C3J{}RS5sOv
ziC+`(8v_ZInN5Rko;~kuFp_e+8m1zqFne%a+RJ4~>UAyb)*fscyg*!rX4}4+$IXey
zF2H5y@c4jqs~#puN_s}|it=Z0?$-y+H}^|_?&hoGFfY`S?6tOUW39=GkV%1ZlCH$_
zu-rrGfypT6*^Tr0HFrmT0WAh9oVX`GxED(}`|HMy30BaI$Ah^*64*t*b<B7?N=_3Y
zs&CrU8~g`%BQ`Qf@b@d))uaYLzWtHZwfJx>a?Oi-F8zoG-JUTUoe8%?Qev)L8+nP#
zLV&ByQRRVam9gJyvGr`KJATq#<#-!q5pD{=^zps~kv_S}obWxiG#?&2@&bqjx3Pkc
zw~T%EQm1@x{cK@U9Qpm|-GykOQQ<5>NJ;_TsB~C^{#6?9`;f*9QIU0?Mx$7l?2n+F
z7gDZ&b4N?XzImjt@KE|JFY)kQU*kW5(q2dy2wfFD<F6*+MR`w4;sTtG`TgNsmJVNt
zOqO&^^Em*+ifNz~vrJXcf7~*Cv~=@|WzyDK*7G8NblQCm%6$=(?F$8b_^$Gm_cZ$@
zS%IgSxzo(bzsmYKRYQNt?cME+a#=R9&j1}eEmldRar`0w;CyaF`RF2n7fo)N>Gie<
z@ReK7vrt+6J!%RTylM*KFrc|Uv?f5mpktY$_9IkY8?Hr2`IoCk*paaI`_d!?YTG$P
zwh?RYjAHfCerT$NA;shcc2RzPEgnlH;?el97ac=yuH#2mI}D-!nI81H$X$~1Avm(Q
zpK4;Oum{7gfaBYjcFDfrAA?*Xf5hlil{KpTX05X#Xufq>HQ9zU9#a0O#T6Y3vCXQA
zRB+u<Rgsrl>rZ6JSEwOCsK2ptwlL{MnDIh@B6xB3yV03<nY-9wyRiFqar)AP8vpX6
zVj0<JuBGBbJ)SD3)o4%Jp-`7Ocutkl{jvC<*27QOZP_)o5~Fu(9f=L4Q$2%PY9QX{
z*o^2jdU_M_;iO|tbvl}g3~KC>xbYFYK`#H{r$7A|GMYb3<;y=6P0MS29v<vJi0~zv
zAna$ShQnIY;jDi`5}D$qK%;Y*^T(9bbh}jF7p6Q|Dp!6}JLPeI^|$~1*S}Og5l_BL
z78e@>XC?%32`o(!-m;gpZLp&u`uiJZZO5;pcvXTuY_A?_ee5%K=$4tqMm-)T2!YmL
z!om)3s<`qixPoqp*<Yj+nwoYW2!=Ch`sGCoK2mrxk8#DRR@w0i5UOkrUGgcyQZI5T
z7<9`8OxLj!h~Dw500BDJA!8KI>%)PmSejpL5o1UV9;M<MMFozR-geB5VHgR!d4H!0
zMR=Kw4`W}rCtBxtYy@D>SwwLGJPQLs@VCIWx~5fW^Z_nx+&Fq;5hDtOcg~|f?|A5t
zevLsfhB@ruW9J2~+4)d5@mKSScwc-Nbl=?XC--&rM?iBw1bx&e%qtj0Hvmb)j_dBn
zrHpBT>$rseS_OSIKsdQ0l;_aJVxH6#3Xed+)uJv{LJkyqQip_aa!r~(<pLoSh)*UM
zHdai{xKD<=C^Vn5#p!pCzxI$6m7Bl$vfBQ7tYbcunnsd}stiQ>2cjbo*+@rHk?SO)
zoK8ZvG{F|1KDLj)(iQ2I?lU1OUc-}Eu8u|PJp1^J5~6b6bEUO9W}uG+GTC5#-Vku;
zXINf;$b}ktUUSrj-noGd7wp#oB2LD<Bd_G&36c>NQ`{pSYGbC9oh@ONeg#KWl5{02
z1mo7polVed6KS-VHhkZcGOtxd<VfK%E#tPlb>1_y*M9maN6C|@l(9wDX`VL~Y}9_U
zr*uKCYWkxEOQ|a7LPwOLR}YUiuYyG68e3&;)?2@5asFJV6fF{iRQ^Chda{8Qx)(@w
zZ!ilpp#iWw5+2aRV$<{6!G7k?#`=9`O6jCp5}2AnmMRfLmPO52*VSlBg<p+vEr1{T
z%F$vt5{0JBh}5pC34gg#aoQA}7k`>Dv_uh=PF6xV&URd9E;tZH52)!BHQ>Xz;x{|W
z&50TI|11L6LC4}Y%0bSz)-!tQbPwqYnv^P)38gXZiA{ER#*+x6dD0X3ri(m%itN$M
zaLHXXTzLAFSB^n%n42vxp=jm0AJndmokK9`b&C0yg?J<_&t*Z@ny1_+XO^DCJzLmC
zo>N8R{t9QJHu4Nq;m#X%T>Pmr{*e0N=@8^**kc{~%%9R-J1W6*q7L>(@Ie&s^ebu@
zbSriGY0d6o^_xh=6P{+a-aUTOZZu9a>u(-Cf4j1cp#Rm=u@y|E?qDJ`kJ^>nr(=^S
zZn+O!)MFEvJKe(M>e%Vn9Ia-oA7rY&eVRE8k~kH{qLjA8lqJzgZxN5QB66QtRsac6
zp7Nx&RZ)p2c8HAT1}0W1m$`!bs*12kOPGM&qfU6Dlj;$$!*NWA@}h+X``|YT3iCRD
zJZhvtziU*L^T~tnwx9BC=TGh_=X(nu9^U8046?1Oe0ccayV^nU1ZW)@o~x}on+cRT
z)KN%f0@I1Bm>(?(hG(O;a1u@CEwrEWqU7OFlsu4KU4Yh@1~r|c(W*})A;nnh`HH#c
ziX}zKju;i`%PpSeG`F~Dw^wi+OZvG@mcGyO3t{5#5g;Nb-UU4Ws35-u`~HL~A?clC
zuuj)S$!e7+Crz)G;tGD5qgRywcgZ-7%V!Q#_#HVc=4hgHCZrjQA_YkWAQO5vae)hT
zZak6GnpCP;#>6MD*)%AsH$>^k2KI2sbvJp}qpgjHdg|1g<D?U^-V64=5gj7<sA0E1
zb5Hi4CNSFxNzRQ{X%9+NO6-=xr%|GyF5$LK6x|s$@l#WNlvkSS#VmL*P*xX>`W8IU
z##2*7`m`ugshmM%58C=okJ6M<aeC>8N3JsS2^Bv_uBtbn*ej}<GM>t-M}{Lfl#UKX
ze93um&yX)oYmm#7mL`35QU@dbM~l}rw35=v^wA=iQ{Apn3Eh<WzoYkgzLI&H4UxuV
zo?9JKTUI9&lbz(#)sd4L{klU?ee{{$G(1&EMG9_ca9QUu6<Nd2`t~(soOUFrE@L)w
zO{(fKI%d1?A|W;QDf+STl1WFDlAKv2IH?KRlT5&BARD)uh@>ZoFPhSz6QU3(k8IS3
zv~?V*PmpmM{*qWI9BRX1T}RJ+kvHV~@^k{7xtjHm#WPiq9k1l#<13g+rLG$I!O!a-
zq5#rUwhIpvW-;qPDh+HmC>xu>XHR5JXGt!UlANslobgOz7wl=<WzF|lgO|_|);aN-
zu!iV8!}G4sNkr;o^0@9lKgy#C54K)6Mqf9|WXgdZx0-$3xDTSBw|3hzQ|;vgNR3j^
z;qDfgX#ujM`pMd(Q|-#$*@LfMH*TLzPO&rbuL#OB&O`>Gm&E9Qxa7p>4<&8toss%q
zP=eKC;>}AQHb~}*A455zL{Bo^I~eavM>U7Jeav*PM{QbHhI4#i)S)3Sw~RH5M#7kD
za>`z6{c3om=N~$^h+nx939}o)7JGM1SI@b;<e-{hS6@w7leqy?U{XfcDFOChKl!Ed
zo2bgpu6)j(m*kKd)*MMbAchJD3FZbO`COoYVke#mT{jOxjvjJlPA9Ob<q#(*Q%un<
zo#uPa5YO{{5GB-xR8galN~pc+)wt+{_2-se^L<JPjNzp#moXqk1fGxabA<w1M&*Er
zWtAx*=azxPemR!|F(PvMLzy`-Zv>~%{d3F6`JN%fbbKEK5f>ajX5cWMawq~K74})?
z+|*Ncw7ZP)k_LTs%iZ}N7{x5|h{9e(pMOFpVW>&iY6t^Z;sei6vC1RKk=?!H#2bg5
z$r#`c@^GPB0uWEYk-s(cf)~ga@65vhUw6^z<=(nmrV!6jDhqJ#LaVe7WDHzL79qYp
z=$Doe)!QIEub(8MasNDI65*$Pw2I;Nzhl4bs5*e3z5Tiq=>ShlN?ZgENZm$<s3BB<
z*mbCU3G9#@Sb9h7E2C32PhZ`#k9Y!3oKQpegB+3yxk6=xb~Ys#XpM6xJj-~`Ljp+x
z=rR$fCORL=aRHpy<6Y2Oa=U!oFYtW@xry(7oNUSe-GHXp9ud-0mObHk>`J6R4bOq5
z=sW_HyCk4CJ3tv`TL;+pa5N3&W2!Qw#<XZigcw2!*>*~Vb(bXJS{{5Dj`a0wDL5nB
z$ZWb1Whg$(dQC@hXtHJFGacI9QqId93)J37H1=S^FN+6fv6ylRyO@GHQc*VPDS2wC
zLM=-P*{5vx){lt*A!jtxD@x(al-w**M_h~67;sr+6@fE7WS^7_QKf2A64ad_&ctyn
zZp{;`xsemLPi#&~MCgT(C5a52=ouV4l%DSB!xm{+<Wk3~+ZpLzH7U7@!z;-}xIuEp
z9iM|9FBX@j+|`_27s9|RL=^^I(Zj_Cy^hnE6Z*o|M`VtZgBcpe>tf#e7B00si6iF1
z!WWf`*J+@NII3!XX!Pn`xDUod!!zinsg<R3yFWS&+earSnDP13BkM7HdU}ila$*#D
z$47_mZ%!Kf#pehJ`hDltZC{O$Yrj5{jI)<sOiiZL>p;s7nRUGs*<6Thlp%+dYwUH&
zwva|M>xaK2rJQ^;9v(>3+wy2_kZPZ=#bYBOsrzg3!J&AJIXolmJv$hSv(@?f(E)1}
zJ33%5VU@iwuc=|m@WaLNnccUawVv%CU?YU$9$XY6TA~;7nK(F5fmVLOGym~k_70C8
z6=55cM*oJU`iWQ%KEw5ZmIFWx2z0>bVmU}M%Y_cT9?_y&Oo?i244_@WE_&h`+rS3e
z^YhrS7VBeMealV%H|DxH`!FyDs&eJ)+zlQdS0Oy!pEz1%H$)9dVRF-23A)j^AdUbg
zLfmUW5$KSG5Uz*p{OnF9BTz@4(vlWb!-z7N4iBhFysu#_O=4|^;=HlH``gj=;s^&#
zTGG?02#PX5r8uqoFe3xqV3}-$CY$1OjbV*A`%DlCv;^2}kDkNm@$oJVYP9ueEy|vC
z2qEBk;511CT*0nyB$kZe<zyRU@xk7N#;gx^4bqWhQpKwvTM0fdNjHvVNWx`c%d#j+
zr^@mr@c~GBstDyEM8yOkJ8%bf+9Ynmybj3N5hd#&1zzMe?Sk!RPO61pC;Z1nN+1^y
zh(%s}o#w-V9XJ&HXMz4HbM^2tEC(H6t|5Y@G>!Uvthb-XO?$YBopH*9RI%?#XK7sE
zoB-=kzXc;-rolhJD|;xe6Q5QSztTnEivquhI)o)}+#VcBL)@P_jVBPQC6C_ZBEn8?
z`Hv$&LGBDy;7$sChIvJl`IpR9vV?t^qm7hXu3<N`l+*5rkdc&<i|aVLJZ8nIYL{z~
zWR0Pe5m8fOEkhkM-ms@ASNO_=m|S%^WT1DP(&kN}Kv6K-=jn3pNHalEc_LH5=~^#+
z4lh_n8V0{$ia3xIrE47G!v^mY<@0IO-eDr(kG@ib!%*{zuV}t-tG3GofP!F!<c^d6
z?u<s``0~`<Cs-$t3lx`$LAL6!H4%D?7`Ra-mL6maZ&DMlLm&{1;CKh)(Nvr-8h(^1
zI>w|BceMVX{bJ=bQ+`|8Y**$_Gt=#vEo2%kpJuB-*f5qq%`Tm0*H6dRO?V_u+}-5Z
zr0gng+(W8{EI^M&GpA!;AT7lDY4+wts1h{D?jke9`03cw)9lyKXLcL9g<~(Y&#vG|
z8h9?GnqY&>texen?IIgFDBs{2w-74@2q?U6&@nrDj7{hnj&b*NY!%1;<=MT>xBHWv
zz#?-qAX@4&Vs|0E2jXHap|B1$M<$BxOvoM*6~a^SBkKkOI_w8di+Kb~?Fo3*xZWRf
zVQ@%Sk-pm_uIkvs0&XhU3~EmRyx6;^HX#vPWNBxO=rI)32g>vF5$!Bs!iw~sHz+ac
zLNCb5zhO_30YjKHCIXW(uxLGag^@vnUx@}H(Dnph2Wz*YU!|hp#61+BS!a0%%o_Au
zHtsKMVf$9`{?#Q8kQL;i_XAb4{lx+Y$9LOLziUh)UH#Mi`QzuHjr9KZ@`wC_^4)IZ
z{gVeD?jOilw#SVRH>)4+K449O(qnN!`7m92fAe0*glr2b=Ti&tEw`YY=Wjyeg9pm{
z>WmrxcD``){OOkR{;Oxo`P|0)!Uj8g{{2%tJB;`||MbKC>Gvxe=li43M-EQQnitP!
zpDX82Zk^w*2Sm<66dLI>x(P%pJ+$`_sXA)3F=Sto5+d4;rj@*(m#}a)k)n`@%3>#0
z_7J<i<--ZoH)nrfMdCi;0J|IxypCW&=IlS;kr06d#@kXlMeLnU#~R(0NKAlFnSh7e
zg0)1@pFqzAnes#tL#QIAa1^h1RS>=0G1H{f`L#EgR|xsYk%`6><|G1_b?i)ZBW_y<
zU}Sxay3L9m3wY22ys3Hru*43b)r$bd9>EJ$MShi%$YYSL2bRv9M9Z=+pwTrK>>~9^
zEmjO9q7H}L5+>2AQ1zG~EOx=w?$ZsdCD|UEmu%KO6U5S+@rS$7A~uf93BH=VqcMo<
zz;aD8znj87+z~@+DBRfP0L><*f{5aVbJGL^YdYnKY`~E~3$umI2z8k!fTx)$1`v}x
zmsG?}JDhW7bCiVCYS{5C38^U(CyhaML&sLRJjdpk&U`~H3WOi^FqAS+aUrr4&kN5-
zr@0l4lVkde2G^9ps|_7nN@=nVoheA*(?nz|@cEd~tKKh{e0!>pqk{wIE|UCQxNgMb
z1C2z_ele^bGJA4jFJywdPPR((nD+xwlq$0Kl<3ecM6@!urZk6pSl8=9;jZR{dnJLD
z0`6X?b1FEjCVg7bakEVv#=L1jWyHs_F~HroL(7JUOsFX|Tq>l&cMIE;OwinpSlcFy
zM^GGvOvmiugsN5~a6`xN;@N=cXb1Z(;-($WWwgehgwj2JN9cmVMc5IHNBr1Byh$|z
z3i}raG+z86Wtd{cB4QdJCec8_c4UwpnSsAI)ZvJhP_KTI3y<h=Vgvw(C?d==YVlzW
zC{+7i?Q5wK0TEUrY!h=3MJ)I#d4X}I56%%DVE*8%{b*!3{^}8PRKo-QA;mmJoD?}?
zHEPNc_h}SBO%JM4L{UZ;i}0z9TTD=7J$xDg*z%AXG3OJHHp3VQWqop&SRoB59w!B`
za#994v%Tp^DxS3NhYKU3sXo!o_MS1~Q1_1yPbP)rBB$3u=xF^UbNu4CA;_Oh6gwdW
z^@CoX+=Y5p_IHvR)(~uZ{0!KBnh0U70~nPO&5h%i$jMLe>AjGQpN69adOBp*?}4G!
zmE&C$F`9rOu?Az<W0j5K<;e^`e;s8ZkDtRSPO>LcY~0GpIF0}&$3{PM{M>dZJ_E6r
zf&m~9whquP5?d8vuPYL48;GrFhpxl4g=ei%Cpp1-RhJbhbsYG-Bwi^qUYR(oXtWuU
zv8;;Tnq2?EgeS*o5rGyV#A#r8fk-!?;UQ{Olm}~^sEk<hywTJKejY-)ExwtZx{33l
zFEEWYNpN$!b2s`?1du1asw$01Kh_CIU!xxKIxJriS6hgJ60BZ-CS>IOp#a|-Ct}aR
zlxXpcxWmL1)o(rAHF-3g?wcLv6LQ}LmIS!SHt-u@@OxU-am2)rAsByD!qp?Q7&Ky6
ziX;3<?_*uS?zv>?76R@Uh|`RHpTJ`ag5zu*k^c?7EPxjTl(DZ8^%8Oc>4!*4OrKFf
zuQRxZBf*|v2S1IqaKNPifVk2Yarc5CX>0KFTKk$8k|uDacZAjniMG~p>P4gWfNqpt
zo1hcF6%A<=>@+WGBj$KN&>F8I|F9B3JjE9(2p5ivylXZ!49IqHysMnt0=-@+p+Ql)
zhMhdi4DHxeNY;iOd<)kD6Ny|1m1mC+6kMEORL@z%`=EWpI+?-4jfH0x*BHUOaCkCj
zEsz+8CTza5$XR$u>mQKHk;pLHE>@FST-&S%^M=pZLECH`Y!vJ)IT*qrUV<G?V@H=%
z@7~{X;{>#l3NkRp-JaQcm%H=kpwfPH*gp80$}-;Mp+@5W?N7IJJFgp??djav)CSvf
z_PBXEnxP5`_TkO;%Qr6z?7*|96R%$`w`XqCuJ*kz+FSeWFQ?mwx7x*dQnP9Azi1a%
z-^|~{2crD~DnQ<prrIxNg4BC-oLhTz$}%x_%J#x6n;VsYydBNGemTXbR~ch-Yu{Uc
z{bKy=?rQtij%AXfOa!M|f5!TGQ+&ZDMtjdDCix`xXIO7^3Eq~TNps$n<}5e&R(axE
z+~;3f<;kB=Nr@*ew|}iW>89?)XH5y*5$7TmXyI}<{!|K6`ZVUb0)uc5JSfQ&k-s^V
zsC+^PAf5*BuYZ-$R3uUP+&JD{LVHqx{n+1v6|>nuSSgBntn464k(rZbMHDlLZ^v65
zi@@`35>612uqG-S6O_+(6i(<%Z(~9Ikdzu@&jzC1t{)v-k6%?(C3XGPR^ppGN1Mtw
zcOtP^Jei8PRl)o+mKu(PuSL_b{vH8Ii}c5OJl+bxl(8e!L?RxI0&I)DPr0<Wgf8PZ
z*j48HGddwZ^tuWRHb-mtunZCWN=qr>#L=4TxJU`LE^tDJ1oJU4I90^xlsX}u3W&kk
z@gxpj*Ew4#*3N+(a#F)kMYYAm6!d_-TX~IiSJ-u+i>eSp;(8>S9vX_f)!Mw9oqF@|
z#cAgDY4+A>W*gxG<<o2l(Fx-K7Z{tB1Stq=U_b!ewZ<v;O_|*irNgaB^G0iX43#EI
z7710#zmpV1Rb=ROfpX`QgQRM8;eB-hbFk;tgY(5I$}ue5gj_3WHekMXn)0mfbv$*R
zMuY5_jnIKPQI{q1kTDQb<$4KC%(hkKES;psYUms2s;sUju~Lwj^0w~E=#-Dr6$UVG
zBv4R<LNYQ?q-9VQPi|9WpA2Q!E}mu5E7%Y!1Bb1R9aEOWw#0}M8wGc@bqc3b_`hKa
zr5ou4q_(I~fZtSXRM11?uKI5&5MEaj@!oj&h6-=u{)9TnssAI&HTZpw4976s?2XBg
zt(o!Y^$5J_fQiCVk%ZCaze+?fitEaA_*9RFqS&j>63BeW8H$$f>M_$S_|g#KWG-rh
z`R3K*J#d`e72PK(2G=QI*f9!LFmdZ!4>c*$=|MQXg%QJZXI{*;s+%ITF$Uobh=TA?
zkTt^6z$ObKL{b!?t-NBP&NsqA5MtV;AW%1?jhykGQ!r4J0qzL#HDocYF^W+Kj2d{+
zzS|#d&wRmr&Nuf9XSeRPcb9>Le*K|`Vk*HVUwg|uSy(O5U4QxF&H5Dh%eFnq?y9^-
zfhb-wekBF0*&#*nXNN>ZRWhWIXlRO<<#e;qw<g{^7KGg4x_2OChl*``lEJrV+Jl8C
ztJMo7C|LQD!+NL<swu)U9KpM2vN$iYLAqI(!Q}87#^iM4(J1>cCb}Xf+NqUu^$kva
zLne&e6_)CTQS>7!oOG4F9i1uCq@m}@%Jp5GVh|@MFB-rdp0q2GYa>cb>y0Z{BQey-
zqf!)~6(aI;eIuPvB+D6<@;=kg{x|yBe}!jP;(b~|`7}15MpM2kHhyVmP?NBW?Q|)@
zn1r;aVVC!5{8tv~^FwCT!6ruqy=c5Y5{_t5_8T5lQ%7rw2xKbhhn*bM*Z|UKyc@Q=
zJ_tWoo(%a0V&vBa#J-N^IXu$92B~A+7fpNY3I<>c)-U7`R$tfcf3|m*>JF?vgGnvV
z$y4wRp>GIW(>}MHz%rXafD%jqEHu4}d(pbn1{$Rhw#kfp=el?ejvrdiC>K4J3yOM9
zW}Q#yxcsXDl|A+_3Rk7`?-jt|aq+OE{8B`oW)Be0d<*f<tAJ((bo313t|v_!Wx)2>
z7<fDYXfB*)wm7UBydU=WCb>=RdtaPpzChG;M-M5m@?MchA|*gA`?>>I-K~y$=A>=w
z-0^}f0wA3uK1p(_vIs^V7a#5@7o9a12h1mV<oWpYhx<2CFt2nz`#hw8DBf2K;+qb>
z4Jq#*+(Wgzm!%K6d*_X(3b}IUk8gJN--2^WHW22xZUU+5T6qB1+|r#hK$Y&H_ojPm
zi%xAjp)ead8nwnTxNz)4fzWJPK$_gc;}A@2q+IYvb+=KM&vSGSeZn~Lmp>__@${Ed
z&gtcp>-=&uK#qCbz#*C!Usjt~%2HGWVv*~$6V%e;z#d-oVTI6MQ^$i|(#woo)<$b%
z9lcUi*#meU%H8cr{_3J>j|5pnXE#LNZBvj`ZV+CQT;W~8OW6=W!i;j!O*XModL`os
zAi3Az2s@J81FtxTT6qW$L+Dv)(Yi+h=AjQBBAgw^m^HxV287$3PCjY%)wwob2bbKx
zGN0K;??Zj$;rP|G5;ytlAtgDKxbe-yR9cvf5ha@LRU)w%AjeV(4ZQduh_MuN;g6Qg
zPQ07>7ANb;5)csf)6olGMqJQnfGN#<ydT1pJmu-}ZxsUu7g1h^NMC7=B=}b7s~~7a
z0AE0_zN*IsLIjy{+*uU?2GdT0I1yH-rsJe0B}7a?k25HpqZ=}(rb*<_C0yZ~;IhGi
zFFe?QNDc@&R5?au5BF1GV$o*59ehT{nA8MS=Ak0+ECl`-7auryreh3+cr&ZI2%vkM
zBrux9d)UPC=pN7`$sXV;;5qPt&5ErGKBUjsGrh}p){%adqeFDKN0wtrbiqIbr`*-U
zGxg&ad}RB_jgW6J2I&O8J=l8vatH7PiyQQTU>9e%X5WtH+T*!)WsjmdaulW;xj+c;
zynR`Cw|*Z$4HJ{}fdCQgtbmxvt+KOH5Xj3}O@M^|_>olu-N6SxNSuNP!b4vyn%@_g
z!CPYdkA_aun-H9qi1sAXF&II&aEC;?cf`G~AB0Am)OuL!brrem63C(U^+jMO`^8}d
zxN9s&I6gR}rh1~0-h`TnI003ph;w)HliHvfb~w+asZ)x$`43#9@>0boT~RWBLh_W}
z06+LzV!up>76T3!Uc7YOer}#*Fh|(>5py1lUwl$Z3lGGAZm(xJ!QKo`4{(6@pLnB1
zHS2wFhd+TCHN;**Nq)efnnM0R=Hx}A|M^F1Iu&PDN}}iAB!8qpc1+r&#eVb?`=hQf
zQ{qbeA9_UR($RcZrz86#gF`(7T69P{MBmo&AT%cWwG>?09$=M9X?;C?@o-Y^rDLN`
z-43K<$ssKol{yidIvz4IG@!-g18_s9=I~O9{`4>81K1|^mhU(wSOGNtld`$G@@_CL
xJ9Wcejy*%{^(bE;eOt$a*bl>e1a6?_Kl_RFB>n6suwbzd{~!5$+onim3jka-v04BC

diff --git a/build-tools/build_iso/gather_packages.pl b/build-tools/build_iso/gather_packages.pl
deleted file mode 100755
index 2a239930..00000000
--- a/build-tools/build_iso/gather_packages.pl
+++ /dev/null
@@ -1,122 +0,0 @@
-#!/usr/bin/perl
-
-# Copy/pasted from http://www.smorgasbork.com/content/gather_packages.txt
-# As referenced by http://www.smorgasbork.com/2012/01/04/building-a-custom-centos-7-kickstart-disc-part-2/
-
-use XML::Simple;
-
-my ($comps_file, $rpm_src_path, $rpm_dst_path, $arch, @extra_groups_and_packages) = @ARGV;
-
-if (!-e $comps_file)
-{
-    print_usage ("Can't find '$comps_file'");
-}
-if (!-e $rpm_src_path)
-{
-    print_usage ("RPM source path '$rpm_src_path' does not exist");
-}
-if (!-e $rpm_dst_path)
-{
-    print_usage ("RPM destination path '$rpm_dst_path' does not exist");
-}
-if (!$arch)
-{
-    print_usage ("Architecture not specified");
-}
-
-#### we always gather core and base; note that for CentOS 7, we also need
-#### to include the grub2 package, or installation will fail
-@desired_groups = ('core', 'base', 'grub2');
-foreach (@extra_groups_and_packages)
-{
-    push (@desired_groups, $_);
-}
-
-$regex = '^(' . join ('|', @desired_groups) . ')$';
-
-print "reading $comps_file...\n";
-print "getting RPMs from $rpm_src_path...\n";
-
-$xml = new XML::Simple;
-$comps = $xml->XMLin($comps_file);
-
-$cmd = "rm $rpm_dst_path/*";
-print "$cmd\n";
-`$cmd`;
-
-%copied_groups = {};
-%copied_packages = {};
-
-foreach $group (@{$comps->{group}})
-{
-    $id = $group->{id};
-    if ($id !~ m#$regex#)
-    {
-        next;
-    }
-
-    print "#### group \@$id\n";
-    $packagelist = $group->{packagelist};
-    foreach $pr (@{$packagelist->{packagereq}})
-    {
-        if ($pr->{type} eq 'optional' || $pr->{type} eq 'conditional')
-        {
-            next;
-        }
-
-        $cmd = "cp $rpm_src_path/" . $pr->{content} . "-[0-9]*.$arch.rpm"
-                . " $rpm_src_path/" . $pr->{content} . "-[0-9]*.noarch.rpm $rpm_dst_path";
-        print "$cmd\n";
-        `$cmd 2>&1`;
-
-        $copied_packages{$pr->{content}} = 1;
-    }
-
-    $copied_groups{$group} = 1;
-}
-
-#### assume that any strings that weren't matched in the comps file's group list
-#### are actually packages
-
-foreach $group (@desired_groups)
-{
-    if ($copied_groups{$group})
-    {
-        next;
-    }
-
-    $cmd = "cp $rpm_src_path/" . $group . "-[0-9]*.$arch.rpm"
-            . " $rpm_src_path/" . $group . "-[0-9]*.noarch.rpm $rpm_dst_path";
-    print "$cmd\n";
-    `$cmd 2>&1`;
-}
-
-sub print_usage
-{
-    my ($msg) = @_;
-
-    ($msg) && print "$msg\n\n";
-
-    print <<__TEXT__;
-
-parse_comps.pl comps_file rpm_src_path arch [xtra_grps_and_pkgs]
-
-    comps_file           the full path to the comps.xml file (as provided 
-                         in the original distro
-
-    rpm_src_path         the full path to the directory of all RPMs from 
-                         the distro
-
-    rpm_dst_path         the full path to the directory where you want
-                         to save the RPMs for your kickstart
-
-    arch                 the target system architecture (e.g. x86_64)
-
-    xtra_grps_and_pkgs   a list of extra groups and packages, separated by spaces
-
-
-__TEXT__
-
-    exit;
-}
-
diff --git a/build-tools/build_iso/image-dev.inc b/build-tools/build_iso/image-dev.inc
deleted file mode 100644
index 63bc157d..00000000
--- a/build-tools/build_iso/image-dev.inc
+++ /dev/null
@@ -1,6 +0,0 @@
-# The following packages will not be included in the customer ISO
-#
-# They are exceptional packages only to be included in developer builds
-enable-dev-patch
-fio
-dstat
diff --git a/build-tools/build_iso/image.inc b/build-tools/build_iso/image.inc
deleted file mode 100644
index 13bb3c64..00000000
--- a/build-tools/build_iso/image.inc
+++ /dev/null
@@ -1,84 +0,0 @@
-# List of packages to be included/installed in ISO
-# If these have dependencies, they will be pulled in automatically
-#
-acpid
-gdb
-python2-gunicorn
-iperf3
-isomd5sum
-python2-aodhclient
-python2-oslo-log
-python2-six
-python-d2to1
-hiera
-python2-pecan
-python-configobj
-python-pep8
-python2-rsa
-ruby-shadow
-swig
-syslinux
-iotop
-linuxptp
-procps-ng
-python-daemon
-python-pyudev
-curl
-lvm2
-time
-postgresql
-postgresql-server
-postgresql-contrib
-targetcli
-strace
-wget
-bind-utils
-selinux-policy
-pm-utils
-tcpdump
-sysstat
-smartmontools
-collectd
-puppet-collectd
-socat
-attr
-
-# for realtime kernel
-rtctl
-rt-setup
-
-# For low-latency compute
-OVMF
-
-# neutron bgp
-python2-pankoclient
-
-# ima plugin for RPM
-ntfs-3g
-ntfsprogs
-python-memcached
-python2-coverage
-
-# kubernetes packages
-docker-ce
-etcd
-docker-forward-journald
-
-# Add debugging tools
-zip
-unzip
-traceroute
-
-# support for persistent sessions
-screen
-
-# For kata container
-kata-runtime
-
-# For nvme disk firmware update
-nvme-cli
-
-# Add openscap tools
-openscap
-openscap-scanner
-scap-security-guide
diff --git a/build-tools/build_iso/isolinux.cfg b/build-tools/build_iso/isolinux.cfg
deleted file mode 100644
index d6e00844..00000000
--- a/build-tools/build_iso/isolinux.cfg
+++ /dev/null
@@ -1,125 +0,0 @@
-default vesamenu.c32
-timeout 600
-
-display boot.msg
-
-# Clear the screen when exiting the menu, instead of leaving the menu displayed.
-# For vesamenu, this means the graphical background is still displayed without
-# the menu itself for as long as the screen remains in graphics mode.
-menu clear
-menu background splash.png
-menu title CentOS 7
-menu vshift 8
-menu rows 18
-menu margin 8
-#menu hidden
-menu helpmsgrow 15
-menu tabmsgrow 13
-
-# Border Area
-menu color border * #00000000 #00000000 none
-
-# Selected item
-menu color sel 0 #ffffffff #00000000 none
-
-# Title bar
-menu color title 0 #ff7ba3d0 #00000000 none
-
-# Press [Tab] message
-menu color tabmsg 0 #ff3a6496 #00000000 none
-
-# Unselected menu item
-menu color unsel 0 #84b8ffff #00000000 none
-
-# Selected hotkey
-menu color hotsel 0 #84b8ffff #00000000 none
-
-# Unselected hotkey
-menu color hotkey 0 #ffffffff #00000000 none
-
-# Help text
-menu color help 0 #ffffffff #00000000 none
-
-# A scrollbar of some type? Not sure.
-menu color scrollbar 0 #ffffffff #ff355594 none
-
-# Timeout msg
-menu color timeout 0 #ffffffff #00000000 none
-menu color timeout_msg 0 #ffffffff #00000000 none
-
-# Command prompt text
-menu color cmdmark 0 #84b8ffff #00000000 none
-menu color cmdline 0 #ffffffff #00000000 none
-
-# Do not display the actual menu unless the user presses a key. All that is displayed is a timeout message.
-
-menu tabmsg Press Tab for full configuration options on menu items.
-
-menu separator # insert an empty line
-menu separator # insert an empty line
-
-label tis
-  menu label ^Install Titanium Cloud
-  menu default
-  kernel vmlinuz
-  append initrd=initrd.img inst.ks=cdrom:/dev/cdrom:/ks/ks.cfg
-
-label linux
-  menu label ^Install CentOS 7
-  kernel vmlinuz
-  append initrd=initrd.img inst.stage2=hd:LABEL=CentOS\x207\x20x86_64 quiet
-
-label check
-  menu label Test this ^media & install CentOS 7
-  kernel vmlinuz
-  append initrd=initrd.img inst.stage2=hd:LABEL=CentOS\x207\x20x86_64 rd.live.check quiet
-
-menu separator # insert an empty line
-
-# utilities submenu
-menu begin ^Troubleshooting
-  menu title Troubleshooting
-
-label vesa
-  menu indent count 5
-  menu label Install CentOS 7 in ^basic graphics mode
-  text help
-	Try this option out if you're having trouble installing
-	CentOS 7.
-  endtext
-  kernel vmlinuz
-  append initrd=initrd.img inst.stage2=hd:LABEL=CentOS\x207\x20x86_64 xdriver=vesa nomodeset quiet
-
-label rescue
-  menu indent count 5
-  menu label ^Rescue a CentOS system
-  text help
-	If the system will not boot, this lets you access files
-	and edit config files to try to get it booting again.
-  endtext
-  kernel vmlinuz
-  append initrd=initrd.img inst.stage2=hd:LABEL=CentOS\x207\x20x86_64 rescue quiet
-
-label memtest
-  menu label Run a ^memory test
-  text help
-	If your system is having issues, a problem with your
-	system's memory may be the cause. Use this utility to
-	see if the memory is working correctly.
-  endtext
-  kernel memtest
-
-menu separator # insert an empty line
-
-label local
-  menu label Boot from ^local drive
-  localboot 0xffff
-
-menu separator # insert an empty line
-menu separator # insert an empty line
-
-label returntomain
-  menu label Return to ^main menu
-  menu exit
-
-menu end
diff --git a/build-tools/build_iso/ks.cfg b/build-tools/build_iso/ks.cfg
deleted file mode 100644
index 7613111c..00000000
--- a/build-tools/build_iso/ks.cfg
+++ /dev/null
@@ -1,36 +0,0 @@
-install
-text
-lang en_US.UTF-8
-keyboard us
-reboot --eject
-firstboot --enable
-auth --enableshadow --passalgo=sha512
-
-# Network information
-network  --bootproto=dhcp --device=enp0s3 --onboot=on --ipv6=auto --activate
-network  --bootproto=static --device=enp0s8 --ip=10.10.10.12 --netmask=255.255.255.0 --ipv6=auto --activate
-network --device=lo  --hostname=localhost.localdomain
-
-rootpw --lock
-timezone America/New_York --isUtc
-user --groups=wheel --name=sysadmin --password=$6$c3gaCcJlh.rp//Yx$/mIjNNoUDS1qZldBL29YSJdsA9ttPA/nXN1CPsIcCmionXC22APT3IoRSd9j5dPiZoviDdQf7YxLsOYdieOQr/ --iscrypted --gecos="sysadmin"
-
-# System bootloader configuration
-#bootloader --location=mbr --boot-drive=sda
-
-autopart --type=lvm
-# Partition clearing information
-clearpart --all --initlabel --drives=sda
-
-cdrom
-#repo --name=base --baseurl=http://mirror.cogentco.com/pub/linux/centos/7/os/x86_64/
-#url --url="http://mirror.cogentco.com/pub/linux/centos/7/os/x86_64/"
-
-%packages --nobase --ignoremissing
-@^minimal
-@core
-kexec-tools
-net-tools
-# CGTS packages
-# end CGTS packages
-%end
diff --git a/build-tools/build_iso/minimal_rpm_list.txt b/build-tools/build_iso/minimal_rpm_list.txt
deleted file mode 100644
index cd8123af..00000000
--- a/build-tools/build_iso/minimal_rpm_list.txt
+++ /dev/null
@@ -1,256 +0,0 @@
-acl
-alsa-lib
-audit
-audit-libs
-authconfig
-basesystem
-bind-libs-lite
-bind-license
-binutils
-biosdevname
-btrfs-progs
-bzip2-libs
-ca-certificates
-centos-logos
-chkconfig
-coreutils
-cpio
-cracklib
-cracklib-dicts
-cronie
-cronie-anacron
-crontabs
-cryptsetup
-cryptsetup-libs
-curl
-cyrus-sasl-lib
-dbus
-dbus-glib
-dbus-libs
-dbus-python
-device-mapper
-device-mapper-event
-device-mapper-event-libs
-device-mapper-libs
-device-mapper-multipath
-device-mapper-multipath-libs
-device-mapper-persistent-data
-diffutils
-dmidecode
-dosfstools
-dracut
-dracut-config-rescue
-dracut-network
-e2fsprogs
-e2fsprogs-libs
-efibootmgr
-efivar-libs
-elfutils-libelf
-elfutils-libs
-ethtool
-expat
-file
-file-libs
-filesystem
-findutils
-fipscheck
-fipscheck-lib
-firewalld
-freetype
-gawk
-gdbm
-gettext
-gettext-libs
-glib2
-glibc
-glibc-common
-glib-networking
-gmp
-gnupg2
-gnutls
-gobject-introspection
-gpgme
-grep
-groff-base
-grub2
-grub2-efi-x64
-grub2-tools
-grubby
-gsettings-desktop-schemas
-gzip
-hardlink
-hostname
-hwdata
-info
-iproute
-iprutils
-iptables-ebtables
-iputils
-jansson
-json-c
-kbd
-kbd-legacy
-kbd-misc
-kernel-tools
-kernel-tools-libs
-kexec-tools
-keyutils-libs
-kmod
-kmod-libs
-kpartx
-krb5-libs
-less
-libacl
-libaio
-libassuan
-libattr
-libblkid
-libcap
-libcap-ng
-libcom_err
-libconfig
-libcroco
-libcurl
-libdaemon
-libdb
-libdb-utils
-libdrm
-libedit
-libestr
-libffi
-libgcc
-libgcrypt
-libgomp
-libgpg-error
-libgudev1
-libidn
-libmnl
-libmodman
-libmount
-libndp
-libnetfilter_conntrack
-libnfnetlink
-libnl
-libnl3
-libnl3-cli
-libpcap
-libpciaccess
-libpipeline
-libproxy
-libpwquality
-libreport-filesystem
-libselinux
-libselinux-python
-libselinux-utils
-libsemanage
-libsepol
-libss
-libssh2
-libstdc++
-libsysfs
-libtasn1
-libteam
-libunistring
-libuser
-libutempter
-libuuid
-libverto
-libxml2
-libxslt
-linux-firmware
-lldpad
-lsscsi
-lua
-lvm2
-lvm2-libs
-lzo
-make
-man-db
-mariadb-libs
-mdadm
-microcode_ctl
-mokutil
-mozjs17
-ncurses
-ncurses-base
-ncurses-libs
-nettle
-newt
-newt-python
-nspr
-nss
-nss-softokn
-nss-softokn-freebl
-nss-sysinit
-nss-tools
-nss-util
-numactl-libs
-openscap
-openscap-scanner
-openssl
-openssl-libs
-os-prober
-p11-kit
-p11-kit-trust
-passwd
-pciutils-libs
-pcre
-pinentry
-pkgconfig
-policycoreutils
-popt
-procps-ng
-pth
-python-gobject-base
-pygpgme
-pyliblzma
-python
-python-backports
-python-backports-ssl_match_hostname
-python-configobj
-python-decorator
-python-iniparse
-python-libs
-python-perf
-python-pycurl
-python-pyudev
-python2-setuptools
-python-slip
-python-slip-dbus
-python-urlgrabber
-pyxattr
-qrencode-libs
-readline
-rootfiles
-rpm
-rpm-build-libs
-rpm-libs
-rpm-python
-sed
-shared-mime-info
-shim-x64
-slang
-snappy
-sqlite
-systemd
-systemd-libs
-systemd-sysv
-sysvinit-tools
-tar
-tcp_wrappers-libs
-teamd
-time
-trousers
-tzdata
-ustr
-util-linux
-virt-what
-which
-xfsprogs
-xml-common
-xz
-xz-libs
-zlib
-lksctp-tools
-boost-thread
-boost-system
diff --git a/build-tools/build_iso/openstack_kilo.txt b/build-tools/build_iso/openstack_kilo.txt
deleted file mode 100644
index 6150b175..00000000
--- a/build-tools/build_iso/openstack_kilo.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-# Files copied in from /import/mirrors/CentOS/7.2.1511/cloud/x86_64/openstack-kilo
-
diff --git a/build-tools/build_minimal_iso/README b/build-tools/build_minimal_iso/README
deleted file mode 100644
index 70cba6c5..00000000
--- a/build-tools/build_minimal_iso/README
+++ /dev/null
@@ -1,112 +0,0 @@
-This document describes how to generate a DVD image (.iso) which installs
-a minimal CentOS installation where the entirety of the installed system is
-build from the provided source.
-
-There are three parts to this document:
-  How to build binary RPMs from source RPMS
-  How to build the install disk from the binary RPMS
-  How to install the minimal system
-
--------------------------------------------------------------------------------
-How to build the binary RPMs from the source RPMS
--------------------------------------------------------------------------------
-
-(note - building the binary RPMs is expected to take a long time, ~ 15 hours
-on a typical system)
-
-The source RPMs in the "srcs" subdirectory are compiled in an environment
-called "mock" which builds each package in a chroot jail to ensure the output
-is not influenced by the build system.  Mock is controlled by a config file.
-The example srcs/build.cfg is provided as a starting point, however it does
-to be adjusted for your build environment.  In particular, the paths and repo
-locations need to be configured for your system.  It is highly recommended that
-a local mirror of the CentOS repos be used for speed.  The example config file
-is configured to use an localhost http mirror of the CentOS repos.
-
-To build the binary RPMs from the source RPMs change to the "srcs" subdirectory
-and execute the "build.sh" script.
-
-# cd srcs
-# ./build.sh
-
-This will use build.cfg and mock to compile every source RPM listed in list.txt.
-The output binary RPMs will be in srcs/results.  There will also be success.txt
-and fail.txt files which list any RPMs that failed to build.  Debugging why RPMs
-fail to build is beyond the scope of this document, however be aware that RPMs
-often fail in the "check" phase of the build (i.e. the package compiled fine
-but tests failed).  Notably, the python package may fail due to a "test_nis"
-failure, and the "attr" and "e2fsprogs" packages may or may not fail depending
-on the host file system used for compilation.  These failures may or may not be
-false positives (for example, the mock environment does not have NIS configured
-which is why python's test_nis reports a failure -- the code is actually fine,
-we just can't run the test in the mock environment).
-
-To disable the check phase, add the line
-
-config_opts['rpmbuild_opts'] = '--nocheck'
-
-to build.cfg.  You can then run build.sh again with list.txt containing
-packages which failed:
-
-# cp list.txt list.txt.orig
-# cp fail.txt list.txt
-# ./build.sh
-
--------------------------------------------------------------------------------
-How to build the install disk from the binary RPMS
--------------------------------------------------------------------------------
-
-Once srcs/results is populated with binary RPMs, an installation disk can be
-built.  Edit the yum.conf file and place an (arbitrary) path for yum log and
-cache locations, and make sure that the repository path points to srcs/results.
-Run the build_centos.sh script to build the installation DVD:
-
-# ./build_centos.sh
-
-Scroll up the output to the top of the "Spawning worker" messages.  You should
-observe a line indicating that there are no remaining unresolved dependencies:
-
-...
-Installing PKG=dhcp-common PKG_FILE=dhcp-common-4.2.5-42.el7.centos.tis.1.x86_64.rpm PKG_REL_PATH=dhcp-common-4.2.5-42.el7.centos.tis.1.x86_64.rpm PKG_PATH=/localdisk/loadbuild/jmckenna/centos/srcs/results/dhcp-common-4.2.5-42.el7.centos.tis.1.x86_64.rpm from repo local-std
-dhcp-common
-Debug: Packages still unresolved:
-
-Spawning worker 0 with 4 pkgs
-Spawning worker 1 with 4 pkgs
-Spawning worker 2 with 4 pkgs
-...
-
-This is your confirmation that all required pacakges were found and installed
-on the ISO.  You should also now see a new file called "centosIso.iso":
-
-# ls -l centosIso.iso
-
--------------------------------------------------------------------------------
-How to install the minimal system
--------------------------------------------------------------------------------
-
-The centosIso.iso file can be burned to a DVD or booted in a virtual
-environment.  It is configured to self-install on boot.  After installation,
-a user with sudo access must be created manually.  The system can then be
-booted.
-
-Power the system on with the DVD inserted.  A system install will take place
-(takes approximately 2 minutes).  The system will then report an error and
-ask you if you wish to report a bug, debug, or quit.  Hit control-alt-F2 to
-switch to a terminal window.  Enter the following commands to change to the
-installed system root, and create a (sysadmin) with sudo access:
-
-cd /mnt/sysimage
-chroot .
-groupadd -r wrs
-groupadd -f -g 345 sys_protected
-useradd -m -g wrs -G root,sys_protected,wheel -d /home/sysadmin -p cBglipPpsKwBQ -s /bin/sh sysadmin
-exit
-
-Change back to the main window with control-alt-F1.
-Hit 3 <enter> (the "Quit" option).  The system will reboot (make sure you eject
-the DVD or use your BIOS to boot from hard disk rather than DVD; the installer
-will re-run if the DVD boots again).
-
-You can log into the system as user "sysadmin" with password "sysadmin".
-
diff --git a/build-tools/build_minimal_iso/README.2 b/build-tools/build_minimal_iso/README.2
deleted file mode 100644
index b50db0a3..00000000
--- a/build-tools/build_minimal_iso/README.2
+++ /dev/null
@@ -1,5 +0,0 @@
-The files in this directory are to be used as described at
-http://twiki.wrs.com/PBUeng/DeliveryExtras#Minimal_CentOS_install
-
-They include the scripts (and customer README) for building a minimual
-CentOS ISO from our modified sources.
diff --git a/build-tools/build_minimal_iso/build.cfg b/build-tools/build_minimal_iso/build.cfg
deleted file mode 100644
index 76564b7e..00000000
--- a/build-tools/build_minimal_iso/build.cfg
+++ /dev/null
@@ -1,108 +0,0 @@
-config_opts['root'] = 'jmckenna-centos/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
-config_opts['dist'] = 'el7'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '7'
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[my-build]
-name=my-build
-baseurl=http://127.0.0.1:8088/localdisk/loadbuild/centos/src/results
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[base]
-name=CentOS-$releasever - Base
-#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=os&infra=$infra
-#baseurl=http://mirror.centos.org/centos/$releasever/os/$basearch/
-baseurl=http://127.0.0.1:8088/CentOS/7.2.1511/os/$basearch/
-gpgcheck=1
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7
-
-#released updates
-[updates]
-name=CentOS-$releasever - Updates
-#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=updates&infra=$infra
-#baseurl=http://mirror.centos.org/centos/$releasever/updates/$basearch/
-baseurl=http://127.0.0.1:8088/CentOS/7.2.1511/updates/$basearch/
-gpgcheck=1
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7
-
-#additional packages that may be useful
-[extras]
-name=CentOS-$releasever - Extras
-#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=extras&infra=$infra
-#baseurl=http://mirror.centos.org/centos/$releasever/extras/$basearch/
-baseurl=http://127.0.0.1:8088/CentOS/7.2.1511/extras/$basearch/
-gpgcheck=1
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7
-
-#additional packages that extend functionality of existing packages
-[centosplus]
-name=CentOS-$releasever - Plus
-#mirrorlist=http://mirrorlist.centos.org/?release=$releasever&arch=$basearch&repo=centosplus&infra=$infra
-#baseurl=http://mirror.centos.org/centos/$releasever/centosplus/$basearch/
-baseurl=http://127.0.0.1:8088/CentOS/7.2.1511/centosplus/$basearch/
-gpgcheck=1
-enabled=1
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-CentOS-7
-
-[epel]
-name=Extra Packages for Enterprise Linux 7 - $basearch
-baseurl=http://download.fedoraproject.org/pub/epel/7/$basearch
-#mirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-7&arch=$basearch
-failovermethod=priority
-enabled=1
-gpgcheck=1
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-7
-
-[epel-debuginfo]
-name=Extra Packages for Enterprise Linux 7 - $basearch - Debug
-baseurl=http://download.fedoraproject.org/pub/epel/7/$basearch/debug
-#mirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-debug-7&arch=$basearch
-failovermethod=priority
-enabled=0
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-7
-gpgcheck=1
-
-[epel-source]
-name=Extra Packages for Enterprise Linux 7 - $basearch - Source
-baseurl=http://download.fedoraproject.org/pub/epel/7/SRPMS
-#mirrorlist=https://mirrors.fedoraproject.org/metalink?repo=epel-source-7&arch=$basearch
-failovermethod=priority
-enabled=1
-gpgkey=file:///etc/pki/rpm-gpg/RPM-GPG-KEY-EPEL-7
-gpgcheck=1
-
-
-"""
-config_opts['environment']['BUILD_BY'] = 'jmckenna'
-config_opts['environment']['BUILD_DATE'] = '2016-10-31 14:27:28 -0400'
-config_opts['environment']['REPO'] = '/localdisk/designer/jmckenna/dev0019/cgcs-root'
-config_opts['environment']['WRS_GIT_BRANCH'] = 'CGCS_DEV_0019'
-config_opts['environment']['CGCS_GIT_BRANCH'] = 'CGCS_DEV_0019'
-config_opts['macros']['%_no_cgcs_license_check'] = '1'
-config_opts['macros']['%_tis_build_type'] = 'std'
-config_opts['chroot_setup_cmd'] = 'install @buildsys-build pigz lbzip2 yum shadow-utils rpm-build lbzip2 gcc glibc-headers make gcc-c++ java-devel'
-config_opts['macros']['%__gzip'] = '/usr/bin/pigz'
-config_opts['macros']['%__bzip2'] = '/usr/bin/lbzip2'
-config_opts['macros']['%_patch_confdir'] = '%{_sysconfdir}/patching'
-config_opts['macros']['%_patch_scripts'] = '%{_patch_confdir}/patch-scripts'
-config_opts['macros']['%_runtime_patch_scripts'] = '/run/patching/patch-scripts'
-config_opts['macros']['%_tis_dist'] = '.tis'
-#config_opts['rpmbuild_opts'] = '--nocheck'
diff --git a/build-tools/build_minimal_iso/build.sh b/build-tools/build_minimal_iso/build.sh
deleted file mode 100755
index 7bf9e7a5..00000000
--- a/build-tools/build_minimal_iso/build.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/sh
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-    CREATEREPO="createrepo"
-fi
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-LOCAL_REPO=${MY_REPO}/local-repo
-if [ ! -d ${LOCAL_REPO} ]; then
-    LOCAL_REPO=${MY_REPO}/cgcs-tis-repo
-    if [ ! -d ${LOCAL_REPO} ]; then
-        # This one isn't fatal, LOCAL_REPO is not required
-        LOCAL_REPO=${MY_REPO}/local-repo
-    fi
-fi
-
-# If a file listed in list.txt is missing, this function attempts to find the
-# RPM and copy it to the local directory.  This should not be required normally
-# and is only used when collecting the source RPMs initially.
-function findSrc {
-    local lookingFor=$1
-    find ${CENTOS_REPO}/Source -name $lookingFor | xargs -I '{}' cp '{}' .
-    find ${LOCAL_REPO}/Source -name $lookingFor | xargs -I '{}' cp '{}' .
-    find $MY_WORKSPACE/std/rpmbuild/SRPMS -name $lookingFor | xargs -I '{}' cp '{}' .
-}
-
-rm -f success.txt
-rm -f fail.txt
-rm -f missing.txt
-mkdir -p results
-infile=list.txt
-
-while read p; do
-
-    if [ ! -f "$p" ]; then
-        findSrc $p
-        if [ ! -f "$p" ]; then
-            echo "couldn't find" >> missing.txt
-            echo "couldn't find $p" >> missing.txt
-            continue
-        fi
-        echo "found $p"
-    fi
-
-    mock -r build.cfg $p --resultdir=results --no-clean
-    if [ $? -eq 0 ]; then
-        echo "$p" >> success.txt
-        cd results
-        $CREATEREPO .
-        cd ..
-    else
-        echo "$p" >> fail.txt
-    fi
-done < $infile
diff --git a/build-tools/build_minimal_iso/build_centos.sh b/build-tools/build_minimal_iso/build_centos.sh
deleted file mode 100755
index e56f6b98..00000000
--- a/build-tools/build_minimal_iso/build_centos.sh
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/bin/bash
-
-# Build a basic CentOS system
-
-CREATEREPO=$(which createrepo_c)
-if [ $? -ne 0 ]; then
-    CREATEREPO="createrepo"
-fi
-
-function final_touches {
-   # create the repo
-    cd ${ROOTDIR}/${DEST}/isolinux
-    $CREATEREPO -g ../comps.xml .
-
-   # build the ISO
-    printf "Building image $OUTPUT_FILE\n"
-    cd ${ROOTDIR}/${DEST}
-    chmod 664 isolinux/isolinux.bin
-    mkisofs -o $OUTPUT_FILE \
-        -R -D -A 'oe_iso_boot' -V 'oe_iso_boot' \
-        -b isolinux.bin -c boot.cat -no-emul-boot \
-        -boot-load-size 4 -boot-info-table \
-        -eltorito-alt-boot \
-        -e images/efiboot.img \
-        -no-emul-boot \
-        isolinux/
-
-    isohybrid --uefi $OUTPUT_FILE
-    implantisomd5 $OUTPUT_FILE
-
-    cd $ROOTDIR
-}
-
-function setup_disk {
-    tar xJf emptyInstaller.tar.xz
-    mkdir ${DEST}/isolinux/Packages
-}
-
-function install_packages {
-    cd ${DEST}/isolinux/Packages
-    ROOT=${ROOTDIR} ../../../cgts_deps.sh --deps=../../../${MINIMAL}
-    cd ${ROOTDIR}
-}
-
-
-ROOTDIR=$PWD
-INSTALLER_SRC=basicDisk
-DEST=newDisk
-PKGS_DIR=all_rpms
-MINIMAL=minimal_rpm_list.txt
-OUTPUT_FILE=${ROOTDIR}/centosIso.iso
-
-# Make a basic install disk (no packages, at this point)
-rm -rf ${DEST}
-setup_disk
-
-# install the packages (initially from minimal list, then resolve deps)
-install_packages
-
-# build the .iso
-final_touches
-
diff --git a/build-tools/build_minimal_iso/cgts_deps.sh b/build-tools/build_minimal_iso/cgts_deps.sh
deleted file mode 100755
index 9e4a88b2..00000000
--- a/build-tools/build_minimal_iso/cgts_deps.sh
+++ /dev/null
@@ -1,265 +0,0 @@
-#!/bin/env bash
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-CGTS_DEPS_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-# Set REPOQUERY, REPOQUERY_SUB_COMMAND, REPOQUERY_RESOLVE and
-# REPOQUERY_WHATPROVIDES_DELIM for our build environment.
-source ${CGTS_DEPS_DIR}/../pkg-manager-utils.sh
-
-function generate_dep_list {
-    TMP_RPM_DB=$(mktemp -d $(pwd)/tmp_rpm_db_XXXXXX)
-    mkdir -p $TMP_RPM_DB
-    rpm --initdb --dbpath $TMP_RPM_DB
-    rpm --dbpath $TMP_RPM_DB --test -Uvh --replacefiles '*.rpm' > $DEPLISTFILE_NEW 2>&1
-    cat $DEPLISTFILE_NEW >> $DEPDETAILLISTFILE
-    cat $DEPLISTFILE_NEW \
-        | grep -v   -e "error:" -e "warning:" -e "Preparing..." \
-                    -e "Verifying..." -e "installing package" \
-        | sed -e "s/ is needed by.*$//" -e "s/ [<=>].*$//" \
-        | sort -u > $DEPLISTFILE
-    \rm -rf $TMP_RPM_DB
-}
-
-join_array() {
-    local IFS="$1"
-    shift
-    echo "$*"
-}
-
-function install_deps {
-    local DEP_LIST=""
-    local DEP_LIST_ARRAY=()
-    local DEP_LIST_FILE="$1"
-
-    rm -f $TMPFILE
-
-    while read DEP
-    do
-        DEP_LIST_ARRAY+=( "${DEP}" )
-    done < $DEP_LIST_FILE
-
-    if [ "${REPOQUERY_WHATPROVIDES_DELIM}" != " " ]; then
-        DEP_LIST_ARRAY=( "$(join_array "${REPOQUERY_WHATPROVIDES_DELIM}" "${DEP_LIST_ARRAY[@]}" )" )
-    fi
-
-    echo "Debug: List of deps to resolve: ${DEP_LIST_ARRAY[@]}"
-
-    if [ ${#DEP_LIST_ARRAY[@]} -gt 0 ]; then
-        return 0
-    fi
-
-    # go through each repo and convert deps to packages
-    for REPOID in `grep  '^[[].*[]]$' $YUM | grep -v '[[]main[]]' | awk -F '[][]' '{print $2 }'`; do
-        echo "TMPDIR=${TMP_DIR}"\
-             "${REPOQUERY} --config=${YUM} --repoid=${REPOID}"\
-             "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-             "--qf='%{name}' --whatprovides ${DEP_LIST_ARRAY[@]}"
-        TMPDIR=${TMP_DIR} \
-            ${REPOQUERY} --config=${YUM} --repoid=${REPOID} \
-            ${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch \
-            --qf='%{name}' --whatprovides ${DEP_LIST_ARRAY[@]} \
-            | sed "s/kernel-debug/kernel/g" >> $TMPFILE
-        \rm -rf $TMP_DIR/yum-$USER-*
-    done
-    sort $TMPFILE -u > $TMPFILE1
-    rm $TMPFILE
-
-    DEP_LIST=""
-    while read DEP
-    do
-        DEP_LIST+="${DEP} "
-    done < $TMPFILE1
-    rm $TMPFILE1
-
-    # next go through each repo and install packages
-    local TARGETS="${DEP_LIST}"
-    echo "Debug: Resolved list of deps to install: ${TARGETS}"
-    local UNRESOLVED
-    for REPOID in `grep  '^[[].*[]]$' $YUM | grep -v '[[]main[]]' | awk -F '[][]' '{print $2 }'`; do
-        UNRESOLVED=" $TARGETS "
-
-        if [[ ! -z "${TARGETS// }" ]]; then
-            REPO_PATH=$(cat $YUM | sed -n "/^\[$REPOID\]\$/,\$p" | grep '^baseurl=' | head -n 1 | awk -F 'file://' '{print $2}' | sed 's:/$::')
-            >&2 echo "TMPDIR=${TMP_DIR}"\
-                    "${REPOQUERY} --config=${YUM} --repoid=${REPOID}"\
-                    "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-                    "--qf='%{name} %{name}-%{version}-%{release}.%{arch}.rpm %{relativepath}'"\
-                    "${REPOQUERY_RESOLVE} ${TARGETS}"
-            TMPDIR=${TMP_DIR} \
-                ${REPOQUERY} --config=${YUM} --repoid=${REPOID} \
-                ${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch \
-                --qf="%{name} %{name}-%{version}-%{release}.%{arch}.rpm %{relativepath}" \
-                ${REPOQUERY_RESOLVE} ${TARGETS} \
-                | sort -r -V >> $TMPFILE
-
-            \rm -rf $TMP_DIR/yum-$USER-*
-
-            while read STR
-            do
-                >&2 echo "STR=$STR"
-                if [ "x$STR" == "x" ]; then
-                    continue
-                fi
-
-                PKG=`echo $STR | cut -d " " -f 1`
-                PKG_FILE=`echo $STR | cut -d " " -f 2`
-                PKG_REL_PATH=`echo $STR | cut -d " " -f 3`
-                PKG_PATH="${REPO_PATH}/${PKG_REL_PATH}"
-
-                >&2 echo "Installing PKG=$PKG PKG_FILE=$PKG_FILE PKG_REL_PATH=$PKG_REL_PATH PKG_PATH=$PKG_PATH from repo $REPOID"
-                cp $PKG_PATH .
-                if [ $? -ne 0 ]; then
-                    >&2 echo "  Here's what I have to work with..."
-                    >&2 echo "  TMPDIR=${TMP_DIR}"\
-                            "${REPOQUERY} -c ${YUM} --repoid=${REPOID}"\
-                            "${REPOQUERY_SUB_COMMAND} --arch=x86_64,noarch"\
-                            "--qf=\"%{name} %{name}-%{version}-%{release}.%{arch}.rpm %{relativepath}\""\
-                            "${REPOQUERY_RESOLVE} ${PKG}"
-                    >&2 echo "  PKG=$PKG PKG_FILE=$PKG_FILE REPO_PATH=$REPO_PATH PKG_REL_PATH=$PKG_REL_PATH PKG_PATH=$PKG_PATH"
-                fi
-
-                echo $UNRESOLVED | grep $PKG
-                echo $UNRESOLVED | grep $PKG >> /dev/null
-                if [ $? -eq 0 ]; then
-                    echo "$PKG found in $REPOID as $PKG" >> $BUILT_REPORT
-                    echo "$PKG_PATH" >> $BUILT_REPORT
-                    UNRESOLVED=$(echo "$UNRESOLVED" | sed "s# $PKG # #g")
-                else
-                    echo "$PKG satisfies unknown target in $REPOID" >> $BUILT_REPORT
-                    echo "  but it doesn't match targets, $UNRESOLVED" >> $BUILT_REPORT
-                    echo "  path $PKG_PATH" >> $BUILT_REPORT
-                    FOUND_UNKNOWN=1
-                fi
-            done < $TMPFILE
-
-            \rm -rf $TMP_DIR/yum-$USER-*
-            TARGETS="$UNRESOLVED"
-        fi
-    done
-    >&2 echo "Debug: Packages still unresolved: $UNRESOLVED"
-    echo "Debug: Packages still unresolved: $UNRESOLVED" >> $WARNINGS_REPORT
-    echo "Debug: Packages still unresolved: $UNRESOLVED" >> $BUILT_REPORT
-    >&2 echo ""
-}
-
-function check_all_explicit_deps_installed {
-
-    PKGS_TO_CHECK=" "
-    while read PKG_TO_ADD
-    do
-        PKGS_TO_CHECK="$PKGS_TO_CHECK ${PKG_TO_ADD}"
-    done < $DEPLISTFILE
-    rpm -qp ${INSTALLDIR}/*.rpm --qf="%{name}\n" > $TMPFILE
-
-    echo "checking... $PKGS_TO_CHECK vs ${INSTALLED_PACKAGE}"
-
-    while read INSTALLED_PACKAGE
-    do
-        echo $PKGS_TO_CHECK | grep -q "${INSTALLED_PACKAGE}"
-        if [ $? -eq 0 ]; then
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/^${INSTALLED_PACKAGE} //"`
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/ ${INSTALLED_PACKAGE} / /"`
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/ ${INSTALLED_PACKAGE}\$//"`
-            PKGS_TO_CHECK=`echo $PKGS_TO_CHECK | sed "s/^${INSTALLED_PACKAGE}\$//"`
-        fi
-    done < $TMPFILE
-
-    if [ -z "$PKGS_TO_CHECK" ]; then
-        >&2 echo "All explicitly specified packages resolved!"
-    else
-        >&2 echo "Could not resolve packages: $PKGS_TO_CHECK"
-        return 1
-    fi
-    return 0
-}
-
-if [ "x${ROOT}" == "x" ]; then
-    ROOT=/localdisk/loadbuild/centos
-fi
-
-ATTEMPTED=0
-DISCOVERED=0
-OUTPUT_DIR=${ROOT}/newDisk
-YUM=${ROOT}/yum.conf
-TMP_DIR=${ROOT}/tmp
-DEPLISTFILE=${ROOT}/deps.txt
-DEPLISTFILE_NEW=${ROOT}/deps_new.txt
-DEPDETAILLISTFILE=${ROOT}/deps_detail.txt
-INSTALLDIR=${ROOT}/newDisk/isolinux/Packages
-
-BUILT_REPORT=${ROOT}/local.txt
-WARNINGS_REPORT=${ROOT}/warnings.txt
-LAST_TEST=${ROOT}/last_test.txt
-TMPFILE=${ROOT}/cgts_deps_tmp.txt
-TMPFILE1=${ROOT}/cgts_deps_tmp1.txt
-
-touch "$BUILT_REPORT"
-touch "$WARNINGS_REPORT"
-
-for i in "$@"
-do
-case $i in
-    -d=*|--deps=*)
-    DEPS="${i#*=}"
-    shift # past argument=value
-    ;;
-esac
-done
-
-mkdir -p $TMP_DIR
-
-rm -f "$DEPDETAILLISTFILE"
-# FIRST PASS we are being given a list of REQUIRED dependencies
-if [ "${DEPS}x" != "x" ]; then
-    cat $DEPS | grep -v "^#" > $DEPLISTFILE
-    install_deps $DEPLISTFILE
-    if [ $? -ne 0 ]; then
-        exit 1
-    fi
-fi
-
-# check that we resolved them all
-check_all_explicit_deps_installed
-if [ $? -ne 0 ]; then
-    >&2 echo "Error -- could not install all explicitly listed packages"
-    exit 1
-fi
-
-ALL_RESOLVED=0
-
-while [ $ALL_RESOLVED -eq 0 ]; do
-    cp $DEPLISTFILE $DEPLISTFILE.old
-    generate_dep_list
-    if [ ! -s $DEPLISTFILE ]; then
-        # no more dependencies!
-        ALL_RESOLVED=1
-    else
-        DIFFLINES=`diff $DEPLISTFILE.old $DEPLISTFILE | wc -l`
-        if [ $DIFFLINES -eq 0 ]; then
-            >&2 echo "Warning: Infinite loop detected in dependency resolution.  See $DEPLISTFILE for details -- exiting"
-            >&2 echo "These RPMS had problems (likely version conflicts)"
-            >&2 cat  $DEPLISTFILE
-
-            echo "Warning: Infinite loop detected in dependency resolution See $DEPLISTFILE for details -- exiting" >> $WARNINGS_REPORT
-            echo "These RPMS had problems (likely version conflicts)" >> $WARNINGS_REPORT
-            cat  $DEPLISTFILE >> $WARNINGS_REPORT
-
-            date > $LAST_TEST
-
-            rm -f $DEPLISTFILE.old
-            exit 1 # nothing fixed
-        fi
-        install_deps $DEPLISTFILE
-        if [ $? -ne 0 ]; then
-            exit 1
-        fi
-    fi
-done
-
-exit 0
diff --git a/build-tools/build_minimal_iso/yum.conf b/build-tools/build_minimal_iso/yum.conf
deleted file mode 100644
index 82c6be87..00000000
--- a/build-tools/build_minimal_iso/yum.conf
+++ /dev/null
@@ -1,22 +0,0 @@
-
-[main]
-cachedir=/localdisk/loadbuild/jmckenna/centos/yum/cache
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/localdisk/loadbuild/jmckenna/centos/yum/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=file:///localdisk/loadbuild/jmckenna/centos/srcs/results
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
diff --git a/build-tools/classify b/build-tools/classify
deleted file mode 100644
index 9594db38..00000000
--- a/build-tools/classify
+++ /dev/null
@@ -1,55 +0,0 @@
-classify () {
-   local pkg_dir="$1"
-
-   if [ -f $pkg_dir/centos/srpm_path ]; then
-      # echo "srpm + patch: $(basename $(cat $pkg_dir/centos/srpm_path | head -n 1))"
-      echo "srpm + patches"
-   elif [ -f $pkg_dir/centos/*.spec ]; then
-      if [ -f $pkg_dir/centos/build_srpm ]; then
-         # echo "spec + custom_script: $pkg_dir"         
-         echo "spec + custom_script"         
-      elif [ -f $pkg_dir/centos/build_srpm.data ]; then
-         local ALLOW_EMPTY_RPM=""
-         local COPY_LIST=""
-         local SRC_DIR=""
-         local PKG_BASE="$pkg_dir"
-         source $pkg_dir/centos/build_srpm.data
-
-         if [ "" != "$SRC_DIR" ] ; then
-            # echo "spec + src_dir: $pkg_dir/$SRC_DIR"
-            echo "spec + src_dir"
-         elif [ "" != "$COPY_LIST" ] ; then
-            local TARBALL=""
-            for f in $COPY_LIST; do
-               case $f in
-                  *.tar.gz)  TARBALL=$f ;;
-                  *.tgz)     TARBALL=$f ;;
-                  *.tar.bz2) TARBALL=$f ;;
-                  *.tar.xz)  TARBALL=$f ;;
-                  *.tar)     TARBALL=$f ;;
-               esac
-            done
-            if [ "" != "$TARBALL" ]; then
-               # echo "spec + tarball: $pkg_dir/$TARBALL"
-               echo "spec + tarball"
-            else
-               # echo "spec + files: $pkg_dir"
-               echo "spec + files"
-            fi
-         elif [ "$ALLOW_EMPTY_RPM" == "true" ] ; then
-            # echo "spec + empty: $pkg_dir"
-            echo "spec + empty"
-         else
-            # echo "spec + build_srpm.data + unknown: $pkg_dir"
-            # cat $pkg_dir/centos/build_srpm.data
-            echo "spec + build_srpm.data + unknown"
-         fi
-      else
-         # echo "spec + unknown: $pkg_dir"
-         echo "spec + unknown"
-      fi
-   else
-      # echo "unknown: $pkg_dir"
-      echo "unknown"
-   fi
-}
diff --git a/build-tools/create-yum-conf b/build-tools/create-yum-conf
deleted file mode 100755
index 67a8486f..00000000
--- a/build-tools/create-yum-conf
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/bin/bash
-
-#
-# usage: create-yum-conf [<layer>]
-#
-
-LAYER=${1:-$LAYER}
-
-if [ "$MY_WORKSPACE" == "" ]; then
-   echo "ERROR: MY_WORKSPACE not defined"
-   exit 1;
-fi
-
-if [ "$MY_REPO" == "" ]; then
-   echo "ERROR: MY_REPO not defined"
-   exit 1;
-fi
-
-if [ "$MY_BUILD_ENVIRONMENT" == "" ]; then
-   echo "ERROR: MY_BUILD_ENVIRONMENT not defined"
-   exit 1;
-fi
-
-if [ "$MY_BUILD_DIR" == "" ]; then
-   echo "ERROR: MY_BUILD_DIR not defined"
-   exit 1;
-fi
-
-MY_YUM_CONF="$MY_WORKSPACE/yum.conf"
-YUM_DIR="$MY_WORKSPACE/yum"
-YUM_CACHE="$YUM_DIR/cache"
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-# Try to find a layer specific mock.cfg.proto
-MOCK_CFG_PROTO="${CENTOS_REPO}/mock.cfg.${LAYER}.proto"
-if [ ! -f "$MOCK_CFG_PROTO" ]; then
-    # Not present, Use default mock.cfg.proto
-    MOCK_CFG_PROTO="${CENTOS_REPO}/mock.cfg.proto"
-fi
-
-
-if [ -f "$MOCK_CFG_PROTO" ]; then
-    if [ -f "$MY_YUM_CONF" ]; then
-        N=$(find $MOCK_CFG_PROTO $MY_REPO/build-tools/create-yum-conf -cnewer $MY_YUM_CONF | wc -l) 
-        if [ $N -gt 0 ]; then
-            # New inputs, remove to force regeneration of yum.conf
-            \rm -f "$MY_YUM_CONF"
-        fi
-    fi
-fi
-   
-if [ ! -f "$MY_YUM_CONF" ]; then
-    if [ -f "$MOCK_CFG_PROTO" ]; then
-        mock_cfg_to_yum_conf.py "$MOCK_CFG_PROTO"      > "$MY_YUM_CONF"
-        sed -i "s%\[main\]%&\ncachedir=$YUM_CACHE%"      "$MY_YUM_CONF"
-        sed -i "s%logfile=.*%logfile=$YUM_DIR/yum.log%"  "$MY_YUM_CONF"
-        # eg: LOCAL_BASE/MY_BUILD_DIR => file:///MY_BUILD_DIR
-        sed -i "s%LOCAL_BASE%file://%g"                  "$MY_YUM_CONF"
-        sed -i "s%MIRROR_BASE%file:///import/mirrors%g"  "$MY_YUM_CONF"
-        sed -i "s%BUILD_ENV%$MY_BUILD_ENVIRONMENT%g"     "$MY_YUM_CONF"
-        # eg: file:///MY_BUILD_DIR => file:///localdisk/loadbuild/...
-        sed -i "s%/MY_BUILD_DIR%$MY_BUILD_DIR%g"         "$MY_YUM_CONF"
-        sed -i "s%/MY_REPO_DIR%$MY_REPO%g"               "$MY_YUM_CONF"
-        # eg = MY_BUILD_DIR/xyz => /localdisk/loadbuild/.../xyz
-        sed -i "s%MY_BUILD_DIR%$MY_BUILD_DIR%g"          "$MY_YUM_CONF"
-        sed -i "s%MY_REPO_DIR%$MY_REPO%g"                "$MY_YUM_CONF"
-    else
-        echo "ERROR: Could not find yum.conf or MOCK_CFG_PROTO"
-        exit 1
-    fi
-fi
-
-if [ ! -d "$YUM_CACHE" ]; then
-    mkdir -p "$YUM_CACHE"
-fi
-
-echo "$MY_YUM_CONF"
-exit 0
diff --git a/build-tools/create_dependancy_cache.py b/build-tools/create_dependancy_cache.py
deleted file mode 100755
index 0226bb31..00000000
--- a/build-tools/create_dependancy_cache.py
+++ /dev/null
@@ -1,716 +0,0 @@
-#!/usr/bin/python
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# Create a RPM dependency cache frpm the RPMS found in
-# 1) $MY_REPO/centos-repo
-# 2) $MY_WORKSPACE/$BUILD_TYPE/rpmbuild/
-#
-# Cache files are written to $MY_REPO/local-repo/dependancy-cache
-# unless an alternate path is supplied.
-#
-# The cache is a set of files that are easily digested by 
-# common shell script tools.  Each file has format
-#   <rpm-name>;<comma-seperated-list-of-rpm-names>
-#
-# The files created are:
-#   RPM-direct-descendants       RPMS that have a direct Requires on X
-#   RPM-transitive-descendants   RPMS that have a possibly indirect need for X
-#
-#   RPM-direct-requires          RPMS directly Required by X
-#   RPM-transitive-requires      RPMS possibly indirectly Required by X
-#
-#   SRPM-direct-descendants      SRPMS whos RPMS have a direct Requires on RPMS built by X
-#   SRPM-transitive-descendants  SRPMS whos RPMS have a possibly indirect need for RPMS built by X
-#
-#   SRPM-direct-requires         SRPMS whos RPMS satisfy a direct BuildRequires of X
-#   SRPM-transitive-requires     SRPMS whos RPMS satisfy an indirect BuildRequires of X
-#
-#   SRPM-direct-requires-rpm      RPMS that satisfy a direct BuildRequires of X
-#   SRPM-transitive-requires-rpm  RPMS that satisfy an indirect BuildRequires of X
-#
-#   rpm-to-srpm                   Map RPM back to the SRPM that created it
-#   srpm-to-rpm                   Map a SRPM to the set of RPMS it builds
-#
-
-import xml.etree.ElementTree as ET
-import fnmatch
-import os
-import shutil
-import gzip
-import sys
-import string
-from optparse import OptionParser
-
-ns = { 'root': 'http://linux.duke.edu/metadata/common',
-       'filelists': 'http://linux.duke.edu/metadata/filelists',
-       'rpm': 'http://linux.duke.edu/metadata/rpm' }
-
-build_types=['std', 'rt']
-rpm_types=['RPM', 'SRPM']
-default_arch = 'x86_64'
-default_arch_list = [ 'x86_64', 'noarch' ]
-default_arch_by_type = {'RPM': [ 'x86_64', 'noarch' ],
-                        'SRPM': [ 'src' ]
-                       }
-
-repodata_dir="/export/jenkins/mirrors"
-if not os.path.isdir(repodata_dir):
-    repodata_dir="/import/mirrors"
-    if not os.path.isdir(repodata_dir):
-        print("ERROR: directory not found %s" % repodata_dir)
-        sys.exit(1)
-
-old_cache_dir="%s/cgcs-tis-repo/dependancy-cache" % os.environ['MY_REPO']
-publish_cache_dir="%s/local-repo/dependancy-cache" % os.environ['MY_REPO']
-
-workspace_repo_dirs={}
-for rt in rpm_types:
-    workspace_repo_dirs[rt]={}
-    for bt in build_types:
-        workspace_repo_dirs[rt][bt]="%s/%s/rpmbuild/%sS" % (os.environ['MY_WORKSPACE'], bt, rt)
-
-if not os.path.isdir(os.environ['MY_REPO']):
-    print("ERROR: directory not found MY_REPO=%s" % os.environ['MY_REPO'])
-    sys.exit(1)
-
-centos_repo_dir="%s/centos-repo" % os.environ['MY_REPO']
-if not os.path.isdir(centos_repo_dir):
-    # Test for the old path
-    centos_repo_dir="%s/cgcs-centos-repo" % os.environ['MY_REPO']
-    if not os.path.isdir(centos_repo_dir):
-        # That doesn't exist either
-        centos_repo_dir="%s/centos-repo" % os.environ['MY_REPO']
-        print("ERROR: directory not found %s" % centos_repo_dir)
-        sys.exit(1)
-
-bin_rpm_mirror_roots = ["%s/Binary" % centos_repo_dir]
-src_rpm_mirror_roots = ["%s/Source" % centos_repo_dir]
-
-for bt in build_types:
-    bin_rpm_mirror_roots.append(workspace_repo_dirs['RPM'][bt])
-    src_rpm_mirror_roots.append(workspace_repo_dirs['SRPM'][bt])
-
-parser = OptionParser('create_dependancy_cache')
-parser.add_option('-c', '--cache_dir', action='store', type='string',
-    dest='cache_dir', help='set cache directory')
-parser.add_option('-t', '--third_party_repo_dir', action='store',
-    type='string', dest='third_party_repo_dir',
-    help='set third party directory')
-(options, args) = parser.parse_args()
-
-if options.cache_dir:
-    publish_cache_dir = options.cache_dir
-
-if options.third_party_repo_dir:
-    third_party_repo_dir = options.third_party_repo_dir
-    bin_rpm_mirror_roots.append(third_party_repo_dir)
-    src_rpm_mirror_roots.append(third_party_repo_dir)
-    if not os.path.isdir(third_party_repo_dir):
-        print("ERROR: directory not found %s" % third_party_repo_dir)
-        sys.exit(1)
-
-# Create directory if required
-if not os.path.isdir(publish_cache_dir):
-    if os.path.isdir(old_cache_dir):
-        print("Relocating old dependency directory: %s -> %s" % (old_cache_dir, publish_cache_dir))
-        os.makedirs(os.path.abspath(os.path.join(publish_cache_dir, os.pardir)))
-        shutil.move(old_cache_dir, publish_cache_dir)
-    else:
-        print("Creating directory: %s" % publish_cache_dir)
-        os.makedirs(publish_cache_dir, 0o755)
-
-# The Main data structure
-pkg_data={}
-
-for rpm_type in rpm_types:
-    pkg_data[rpm_type]={}
-
-    # map provided_name -> pkg_name
-    pkg_data[rpm_type]['providers']={}
-
-    # map pkg_name -> required_names ... could be a pkg, capability or file
-    pkg_data[rpm_type]['requires']={}
-
-    # map file_name -> pkg_name
-    pkg_data[rpm_type]['file_owners']={}
-
-    # map pkg_name -> file_name
-    pkg_data[rpm_type]['files']={}
-
-    # map pkg_name -> required_pkg_names ... only pkg names, and only direct requirement
-    pkg_data[rpm_type]['pkg_direct_requires']={}
-
-    # map pkg_name -> required_pkg_names ... only pkg names, but this is the transitive list of all requirements
-    pkg_data[rpm_type]['pkg_transitive_requires']={}
-
-    # map pkg_name -> descendant_pkgs ... only packages the directly require this package
-    pkg_data[rpm_type]['pkg_direct_descendants']={}
-
-    # map pkg_name -> descendant_pkgs ... packages that have a transitive requiremant on this package
-    pkg_data[rpm_type]['pkg_transitive_descendants']={}
-
-    # Map package name to a source rpm file name
-    pkg_data[rpm_type]['sourcerpm']={}
-    pkg_data[rpm_type]['binrpm']={}
-
-    # Map file name to package name
-    pkg_data[rpm_type]['fn_to_name']={}
-
-pkg_data['SRPM']['pkg_direct_requires_rpm']={}
-pkg_data['SRPM']['pkg_transitive_requires_rpm']={}
-
-
-# Return a list of file paths, starting in 'dir', matching 'pattern'
-#    dir= directory to search under
-#    pattern= search for file or directory matching pattern, wildcards allowed
-#    recursive_depth= how many levels of directory before giving up
-def file_search(dir, pattern, recursive_depth=0):
-    match_list = []
-    new_depth = recursive_depth - 1
-    # print "file_search(%s,%s,%s)" % (dir, pattern, recursive_depth)
-    for file in os.listdir(dir):
-        path = "%s/%s" % (dir, file)
-        if fnmatch.fnmatch(file, pattern):
-            print(path)
-            match_list.append(path)
-        elif (recursive_depth > 0) and os.path.isdir(path):
-            sub_list = []
-            sub_list = file_search(path, pattern, recursive_depth=new_depth)
-            match_list.extend(sub_list)
-    return match_list
-
-# Return the list of .../repodate/*primary.xml.gz files
-#    rpm_type= 'RPM' or 'SRPM'
-#    arch= e.g. x86_64, only relevant of rpm_type=='RPM'
-def get_repo_primary_data_list(rpm_type='RPM', arch_list=default_arch_list):
-    rpm_repodata_roots = []
-    repodata_list = []
-
-    if rpm_type == 'RPM':
-        for d in bin_rpm_mirror_roots:
-            if os.path.isdir(d):
-                sub_list = file_search(d, 'repodata', 25)
-                rpm_repodata_roots.extend(sub_list)
-    elif rpm_type == 'SRPM':
-        for d in src_rpm_mirror_roots:
-            if os.path.isdir(d):
-                sub_list = file_search(d, 'repodata', 5)
-                rpm_repodata_roots.extend(sub_list)
-    else:
-        print("invalid rpm_type '%s', valid types are %s" % (rpm_type, str(rpm_types)))
-        return repodata_list
-
-    for d in rpm_repodata_roots:
-        sub_list = file_search(d, '*primary.xml.gz', 2)
-        repodata_list.extend(sub_list)
-   
-    return repodata_list
-
-
-# Return the list of .../repodate/*filelists.xml.gz files
-#    rpm_type= 'RPM' or 'SRPM'
-#    arch= e.g. x86_64, only relevant of rpm_type=='RPM'
-def get_repo_filelists_data_list(rpm_type='RPM', arch_list=default_arch_list):
-    rpm_repodata_roots = []
-    repodata_list = []
-
-    if rpm_type == 'RPM':
-        for d in bin_rpm_mirror_roots:
-            if os.path.isdir(d):
-                sub_list = file_search(d, 'repodata', 25)
-                rpm_repodata_roots.extend(sub_list)
-    elif rpm_type == 'SRPM':
-        for d in src_rpm_mirror_roots:
-            if os.path.isdir(d):
-                sub_list = file_search(d, 'repodata', 5)
-                rpm_repodata_roots.extend(sub_list)
-    else:
-        print "invalid rpm_type '%s', valid types are %s" % (rpm_type, str(rpm_types))
-        return repodata_list
-
-    for d in rpm_repodata_roots:
-       sub_list = file_search(d, '*filelists.xml.gz', 2)
-       repodata_list.extend(sub_list)
-
-    return repodata_list
-
-
-
-# Process a list of repodata files (*filelists.xml.gz) and extract package data.
-# Data is saved to the global 'pkg_data'.
-def read_data_from_repodata_filelists_list(repodata_list, rpm_type='RPM', arch=default_arch):
-    for repodata_path in repodata_list:
-        read_data_from_filelists_xml_gz(repodata_path, rpm_type=rpm_type, arch=arch)
-
-# Process a single repodata file (*filelists.xml.gz) and extract package data.
-# Data is saved to the global 'pkg_data'.
-def read_data_from_filelists_xml_gz(repodata_path, rpm_type='RPM', arch=default_arch):
-    # print "repodata_path=%s" % repodata_path
-    infile = gzip.open(repodata_path)
-    root = ET.parse(infile).getroot()
-    for pkg in root.findall('filelists:package', ns):
-        name=pkg.get('name')
-        pkg_arch=pkg.get('arch')
-
-        version=""
-        release=""
-
-        if arch is not None:
-            if pkg_arch is None:
-                continue
-            if pkg_arch != arch:
-                continue
-
-        v=pkg.find('filelists:version', ns)
-        if v is not None:
-            version=v.get('ver')
-            release=v.get('rel')
-        else:
-            print("%s: %s.%s has no 'filelists:version'" % (repodata_path, name, pkg_arch))
-
-        # print "%s  %s  %s  %s  " % (name, pkg_arch, version,  release)
-
-        for f in pkg.findall('filelists:file', ns):
-            fn=f.text
-            # print "   fn=%s -> plg=%s" % (fn, name)
-            if not name in pkg_data[rpm_type]['files']:
-                pkg_data[rpm_type]['files'][name]=[]
-            pkg_data[rpm_type]['files'][name].append(fn)
-            if not fn in pkg_data[rpm_type]['file_owners']:
-                pkg_data[rpm_type]['file_owners'][fn]=[]
-            pkg_data[rpm_type]['file_owners'][fn]=name
-
-
-
-
-
-# Process a list of repodata files (*primary.xml.gz) and extract package data.
-# Data is saved to the global 'pkg_data'.
-def read_data_from_repodata_primary_list(repodata_list, rpm_type='RPM', arch=default_arch):
-    for repodata_path in repodata_list:
-        read_data_from_primary_xml_gz(repodata_path, rpm_type=rpm_type, arch=arch)
-
-# Process a single repodata file (*primary.xml.gz) and extract package data.
-# Data is saved to the global 'pkg_data'.
-def read_data_from_primary_xml_gz(repodata_path, rpm_type='RPM', arch=default_arch):
-    # print "repodata_path=%s" % repodata_path
-    infile = gzip.open(repodata_path)
-    root = ET.parse(infile).getroot()
-    for pkg in root.findall('root:package', ns):
-        name=pkg.find('root:name', ns).text
-        pkg_arch=pkg.find('root:arch', ns).text
-        version=""
-        release=""
-        license=""
-        sourcerpm=""
-
-        if arch is not None:
-            if pkg_arch is None:
-                continue
-            if pkg_arch != arch:
-                continue
-
-        pkg_data[rpm_type]['providers'][name]=name
-        pkg_data[rpm_type]['files'][name]=[]
-        pkg_data[rpm_type]['requires'][name] = []
-        pkg_data[rpm_type]['requires'][name].append(name)
-
-        url=pkg.find('root:url', ns).text
-        v=pkg.find('root:version', ns)
-        if v is not None:
-            version=v.get('ver')
-            release=v.get('rel')
-        else:
-            print("%s: %s.%s has no 'root:version'" % (repodata_path, name, pkg_arch))
-
-        fn="%s-%s-%s.%s.rpm" % (name, version, release, arch)
-        pkg_data[rpm_type]['fn_to_name'][fn]=name
-
-        # SAL print "%s  %s  %s  %s  " % (name, pkg_arch, version,  release)
-        print("%s  %s  %s  %s  " % (name, pkg_arch, version,  release))
-        f=pkg.find('root:format', ns)
-        if f is not None:
-            license=f.find('rpm:license', ns).text
-            sourcerpm=f.find('rpm:sourcerpm', ns).text
-            if sourcerpm != "":
-                pkg_data[rpm_type]['sourcerpm'][name] = sourcerpm
-            # SAL print "--- requires ---"
-            print("--- requires ---")
-            r=f.find('rpm:requires', ns)
-            if r is not None:
-                for rr in r.findall('rpm:entry', ns):
-                    required_name=rr.get('name')
-                    # SAL print "    %s" % required_name
-                    print "    %s" % required_name
-                    pkg_data[rpm_type]['requires'][name].append(required_name)
-            else:
-                print("%s: %s.%s has no 'rpm:requires'" % (repodata_path, name, pkg_arch))
-            # print "--- provides ---"
-            p=f.find('rpm:provides', ns)
-            if p is not None:
-                for pp in p.findall('rpm:entry', ns):
-                    provided_name=pp.get('name')
-                    # print "    %s" % provided_name
-                    if name == "kernel-rt" and provided_name in pkg_data[rpm_type]['providers'] and pkg_data[rpm_type]['providers'][provided_name] == "kernel":
-                        continue
-                    if name.startswith('kernel-rt'):
-                        alt_name=string.replace(name, 'kernel-rt', 'kernel')
-                        if provided_name in pkg_data[rpm_type]['providers'] and pkg_data[rpm_type]['providers'][provided_name] == alt_name:
-                            continue
-                    pkg_data[rpm_type]['providers'][provided_name]=name
-            else:
-                print("%s: %s.%s has no 'rpm:provides'" % (repodata_path, name, pkg_arch))
-            # print "--- files ---"
-            for fn in f.findall('root:file', ns):
-               file_name=fn.text
-               # print "    %s" % file_name
-               pkg_data[rpm_type]['files'][name].append(file_name)
-               if name == "kernel-rt" and file_name in pkg_data[rpm_type]['file_owners'] and pkg_data[rpm_type]['file_owners'][file_name] == "kernel":
-                   continue
-               if name.startswith('kernel-rt'):
-                   alt_name=string.replace(name, 'kernel-rt', 'kernel')
-                   if provided_name in pkg_data[rpm_type]['file_owners'] and pkg_data[rpm_type]['file_owners'][file_name] == alt_name:
-                       continue
-               pkg_data[rpm_type]['file_owners'][file_name]=name
-        else:
-            print("%s: %s.%s has no 'root:format'" % (repodata_path, name, pkg_arch))
-        # print "%s  %s  %s  %s  %s" % (name, pkg_arch, version,  release, license)
-    infile.close
-    
-def calulate_all_direct_requires_and_descendants(rpm_type='RPM'):
-    # print "calulate_all_direct_requires_and_descendants rpm_type=%s" % rpm_type
-    for name in pkg_data[rpm_type]['requires']:
-        calulate_pkg_direct_requires_and_descendants(name, rpm_type=rpm_type)
-
-def calulate_pkg_direct_requires_and_descendants(name, rpm_type='RPM'):
-    print("%s needs:" % name)
-    if not rpm_type in pkg_data:
-        print("Error: unknown rpm_type '%s'" % rpm_type)
-        return
-
-    if not name in pkg_data[rpm_type]['requires']:
-        print("Note: No requires data for '%s'" % name)
-        return
-
-    for req in pkg_data[rpm_type]['requires'][name]:
-        pro = '???'
-        if rpm_type == 'RPM':
-            if req in pkg_data[rpm_type]['providers']:
-                pro = pkg_data[rpm_type]['providers'][req]
-            elif req in pkg_data[rpm_type]['file_owners']:
-                pro = pkg_data[rpm_type]['file_owners'][req]
-            else:
-                pro = '???'
-                print("package %s has unresolved requirement '%s'" % (name, req))
-        else:
-            #  i.e. rpm_type == 'SRPM'
-            rpm_pro = '???'
-            if req in pkg_data['RPM']['providers']:
-                rpm_pro = pkg_data['RPM']['providers'][req]
-            elif req in pkg_data['RPM']['file_owners']:
-                rpm_pro = pkg_data['RPM']['file_owners'][req]
-            else:
-                rpm_pro = '???'
-                print("package %s has unresolved requirement '%s'" % (name, req))
-
-            if rpm_pro is not None and rpm_pro != '???':
-                if not name in pkg_data[rpm_type]['pkg_direct_requires_rpm']:
-                    pkg_data[rpm_type]['pkg_direct_requires_rpm'][name] = []
-                if not rpm_pro in pkg_data[rpm_type]['pkg_direct_requires_rpm'][name]:
-                    pkg_data[rpm_type]['pkg_direct_requires_rpm'][name].append(rpm_pro)
-
-                if rpm_pro in pkg_data['RPM']['sourcerpm']:
-                    fn = pkg_data['RPM']['sourcerpm'][rpm_pro]
-                    if fn in pkg_data['SRPM']['fn_to_name']:
-                        pro = pkg_data['SRPM']['fn_to_name'][fn]
-                    else:
-                        pro = '???'
-                        print("package %s requires srpm file name %s" % (name,fn))
-                else:
-                    pro = '???'
-                    print("package %s requires rpm %s, but that rpm has no known srpm" % (name,rpm_pro))
-
-        if pro is not None and pro != '???':
-            if not name in pkg_data[rpm_type]['pkg_direct_requires']:
-                pkg_data[rpm_type]['pkg_direct_requires'][name] = []
-            if not pro in pkg_data[rpm_type]['pkg_direct_requires'][name]:
-                pkg_data[rpm_type]['pkg_direct_requires'][name].append(pro)
-            if not pro in pkg_data[rpm_type]['pkg_direct_descendants']:
-                pkg_data[rpm_type]['pkg_direct_descendants'][pro] = []
-            if not name in pkg_data[rpm_type]['pkg_direct_descendants'][pro]:
-                pkg_data[rpm_type]['pkg_direct_descendants'][pro].append(name)
-
-        print("    %s -> %s" % (req, pro))
-
-
-
-def calulate_all_transitive_requires(rpm_type='RPM'):
-    for name in pkg_data[rpm_type]['pkg_direct_requires']:
-        calulate_pkg_transitive_requires(name, rpm_type=rpm_type)
-
-def calulate_pkg_transitive_requires(name, rpm_type='RPM'):
-    if not rpm_type in pkg_data:
-        print("Error: unknown rpm_type '%s'" % rpm_type)
-        return
-
-    if not name in pkg_data[rpm_type]['pkg_direct_requires']:
-        print("Note: No direct_requires data for '%s'" % name)
-        return
-
-    pkg_data[rpm_type]['pkg_transitive_requires'][name]=[]
-    if rpm_type != 'RPM':
-        pkg_data[rpm_type]['pkg_transitive_requires_rpm'][name]=[]
-    unresolved = []
-    unresolved.append(name)
-
-    while unresolved:
-        n = unresolved.pop(0)
-        # print "%s: remove %s" % (name, n)
-        if rpm_type == 'RPM':
-            direct_requires='pkg_direct_requires'
-            transitive_requires='pkg_transitive_requires'
-        else:
-            direct_requires='pkg_direct_requires_rpm'
-            transitive_requires='pkg_transitive_requires_rpm'
-        if n in pkg_data[rpm_type][direct_requires]:
-            for r in pkg_data[rpm_type][direct_requires][n]:
-                if r != name:
-                    if not r in pkg_data[rpm_type][transitive_requires][name]:
-                        pkg_data[rpm_type][transitive_requires][name].append(r)
-                        if r in pkg_data['RPM']['pkg_transitive_requires']:
-                            for r2 in pkg_data['RPM']['pkg_transitive_requires'][r]:
-                                if r2 != name:
-                                    if not r2 in pkg_data[rpm_type][transitive_requires][name]:
-                                        pkg_data[rpm_type][transitive_requires][name].append(r2)
-                        else:
-                            if rpm_type == 'RPM':
-                                unresolved.append(r)
-                            else:
-                                print("WARNING: calulate_pkg_transitive_requires: can't append rpm to SRPM list, name=%s, r=%s" % (name, r))
-                            # print "%s: add %s" % (name, r)
-    if rpm_type != 'RPM':
-        for r in pkg_data[rpm_type]['pkg_transitive_requires_rpm'][name]:
-            if r in pkg_data['RPM']['sourcerpm']:
-                fn = pkg_data['RPM']['sourcerpm'][r]
-                if fn in pkg_data['SRPM']['fn_to_name']:
-                    s = pkg_data['SRPM']['fn_to_name'][fn]
-                    pkg_data[rpm_type]['pkg_transitive_requires'][name].append(s)
-                else:
-                    print("package %s requires srpm file name %s, but srpm name is not known" % (name, fn))
-            else:
-                print("package %s requires rpm %s, but that rpm has no known srpm" % (name, r))
-
-def calulate_all_transitive_descendants(rpm_type='RPM'):
-    for name in pkg_data[rpm_type]['pkg_direct_descendants']:
-        calulate_pkg_transitive_descendants(name, rpm_type=rpm_type)
-
-def calulate_pkg_transitive_descendants(name, rpm_type='RPM'):
-    if not rpm_type in pkg_data:
-        print("Error: unknown rpm_type '%s'" % rpm_type)
-        return
-
-    if not name in pkg_data[rpm_type]['pkg_direct_descendants']:
-        print("Note: No direct_requires data for '%s'" % name)
-        return
-
-    pkg_data[rpm_type]['pkg_transitive_descendants'][name]=[]
-    unresolved = []
-    unresolved.append(name)
-
-    while unresolved:
-        n = unresolved.pop(0)
-        # print "%s: remove %s" % (name, n)
-        if n in pkg_data[rpm_type]['pkg_direct_descendants']:
-            for r in pkg_data[rpm_type]['pkg_direct_descendants'][n]:
-                if r != name:
-                    if not r in pkg_data[rpm_type]['pkg_transitive_descendants'][name]:
-                        pkg_data[rpm_type]['pkg_transitive_descendants'][name].append(r)
-                        if r in pkg_data[rpm_type]['pkg_transitive_descendants']:
-                            for n2 in pkg_data[rpm_type]['pkg_transitive_descendants'][r]:
-                                if n2 != name:
-                                    if not n2 in pkg_data[rpm_type]['pkg_transitive_descendants'][name]:
-                                        pkg_data[rpm_type]['pkg_transitive_descendants'][name].append(n2)
-                        else:
-                            unresolved.append(r)
-                            # print "%s: add %s" % (name, r)
-
-def create_dest_rpm_data():
-    for name in sorted(pkg_data['RPM']['sourcerpm']):
-        fn=pkg_data['RPM']['sourcerpm'][name]
-        if fn in pkg_data['SRPM']['fn_to_name']:
-            sname = pkg_data['SRPM']['fn_to_name'][fn]
-            if not sname in pkg_data['SRPM']['binrpm']:
-                pkg_data['SRPM']['binrpm'][sname]=[]
-            pkg_data['SRPM']['binrpm'][sname].append(name)
-
-def create_cache(cache_dir):
-    for rpm_type in rpm_types:
-        print("")
-        print("==== %s ====" % rpm_type)
-        print("")
-        rpm_repodata_primary_list = get_repo_primary_data_list(rpm_type=rpm_type, arch_list=default_arch_by_type[rpm_type])
-        for arch in default_arch_by_type[rpm_type]:
-            read_data_from_repodata_primary_list(rpm_repodata_primary_list, rpm_type=rpm_type, arch=arch)
-        rpm_repodata_filelists_list = get_repo_filelists_data_list(rpm_type=rpm_type, arch_list=default_arch_by_type[rpm_type])
-        for arch in default_arch_by_type[rpm_type]:
-            read_data_from_repodata_filelists_list(rpm_repodata_filelists_list, rpm_type=rpm_type, arch=arch)
-        calulate_all_direct_requires_and_descendants(rpm_type=rpm_type)
-        calulate_all_transitive_requires(rpm_type=rpm_type)
-        calulate_all_transitive_descendants(rpm_type=rpm_type)
-
-        cache_name="%s/%s-direct-requires" % (cache_dir, rpm_type)
-        f=open(cache_name, "w")
-        for name in sorted(pkg_data[rpm_type]['pkg_direct_requires']):
-            print("%s needs %s" % (name, pkg_data[rpm_type]['pkg_direct_requires'][name]))
-            f.write("%s;" % name)
-            first=True
-            for req in sorted(pkg_data[rpm_type]['pkg_direct_requires'][name]):
-                if first:
-                    first=False
-                    f.write("%s" % req)
-                else:
-                    f.write(",%s" % req)
-            f.write("\n")
-        f.close()
-
-        cache_name="%s/%s-direct-descendants" % (cache_dir, rpm_type)
-        f=open(cache_name, "w")
-        for name in sorted(pkg_data[rpm_type]['pkg_direct_descendants']):
-            print("%s informs %s" % (name, pkg_data[rpm_type]['pkg_direct_descendants'][name]))
-            f.write("%s;" % name)
-            first=True
-            for req in sorted(pkg_data[rpm_type]['pkg_direct_descendants'][name]):
-                if first:
-                    first=False
-                    f.write("%s" % req)
-                else:
-                    f.write(",%s" % req)
-            f.write("\n")
-        f.close()
-
-        cache_name="%s/%s-transitive-requires" % (cache_dir, rpm_type)
-        f=open(cache_name, "w")
-        for name in sorted(pkg_data[rpm_type]['pkg_transitive_requires']):
-            f.write("%s;" % name)
-            first=True
-            for req in sorted(pkg_data[rpm_type]['pkg_transitive_requires'][name]):
-                if first:
-                    first=False
-                    f.write("%s" % req)
-                else:
-                    f.write(",%s" % req)
-            f.write("\n")
-        f.close()
-
-        cache_name="%s/%s-transitive-descendants" % (cache_dir, rpm_type)
-        f=open(cache_name, "w")
-        for name in sorted(pkg_data[rpm_type]['pkg_transitive_descendants']):
-            f.write("%s;" % name)
-            first=True
-            for req in sorted(pkg_data[rpm_type]['pkg_transitive_descendants'][name]):
-                if first:
-                    first=False
-                    f.write("%s" % req)
-                else:
-                    f.write(",%s" % req)
-            f.write("\n")
-        f.close()
-
-        if rpm_type != 'RPM':
-            cache_name="%s/%s-direct-requires-rpm" % (cache_dir, rpm_type)
-            f=open(cache_name, "w")
-            for name in sorted(pkg_data[rpm_type]['pkg_direct_requires_rpm']):
-                print("%s needs rpm %s" % (name, pkg_data[rpm_type]['pkg_direct_requires_rpm'][name]))
-                f.write("%s;" % name)
-                first=True
-                for req in sorted(pkg_data[rpm_type]['pkg_direct_requires_rpm'][name]):
-                    if first:
-                        first=False
-                        f.write("%s" % req)
-                    else:
-                        f.write(",%s" % req)
-                f.write("\n")
-            f.close()
-
-            cache_name="%s/%s-transitive-requires-rpm" % (cache_dir, rpm_type)
-            f=open(cache_name, "w")
-            for name in sorted(pkg_data[rpm_type]['pkg_transitive_requires_rpm']):
-                f.write("%s;" % name)
-                first=True
-                for req in sorted(pkg_data[rpm_type]['pkg_transitive_requires_rpm'][name]):
-                    if first:
-                        first=False
-                        f.write("%s" % req)
-                    else:
-                        f.write(",%s" % req)
-                f.write("\n")
-            f.close()
-
-    cache_name="%s/rpm-to-srpm" % cache_dir
-    f=open(cache_name, "w")
-    for name in sorted(pkg_data['RPM']['sourcerpm']):
-        f.write("%s;" % name)
-        fn=pkg_data['RPM']['sourcerpm'][name]
-        if fn in pkg_data['SRPM']['fn_to_name']:
-            sname = pkg_data['SRPM']['fn_to_name'][fn]
-            f.write("%s" % sname)
-        f.write("\n")
-    f.close()
-
-    create_dest_rpm_data()
-    cache_name="%s/srpm-to-rpm" % cache_dir
-    f=open(cache_name, "w")
-    for name in sorted(pkg_data['SRPM']['binrpm']):
-        f.write("%s;" % name)
-        first=True
-        for bname in sorted(pkg_data['SRPM']['binrpm'][name]):
-            if first:
-                first=False
-                f.write("%s" % bname)
-            else:
-                f.write(",%s" % bname)
-        f.write("\n")
-    f.close()
-
-
-    
-def test():
-    for rpm_type in rpm_types:
-        print("")
-        print("==== %s ====" % rpm_type)
-        print("")
-        rpm_repodata_primary_list = get_repo_primary_data_list(rpm_type=rpm_type, arch_list=default_arch_by_type[rpm_type])
-        for arch in default_arch_by_type[rpm_type]:
-            read_data_from_repodata_primary_list(rpm_repodata_primary_list, rpm_type=rpm_type, arch=arch)
-        rpm_repodata_filelists_list = get_repo_filelists_data_list(rpm_type=rpm_type, arch_list=default_arch_by_type[rpm_type])
-        for arch in default_arch_by_type[rpm_type]:
-            read_data_from_repodata_filelists_list(rpm_repodata_filelists_list, rpm_type=rpm_type, arch=arch)
-        calulate_all_direct_requires_and_descendants(rpm_type=rpm_type)
-        calulate_all_transitive_requires(rpm_type=rpm_type)
-        calulate_all_transitive_descendants(rpm_type=rpm_type)
-
-        for name in pkg_data[rpm_type]['pkg_direct_requires']:
-            print("%s needs %s" % (name, pkg_data[rpm_type]['pkg_direct_requires'][name]))
-
-        for name in pkg_data[rpm_type]['pkg_direct_descendants']:
-            print("%s informs %s" % (name, pkg_data[rpm_type]['pkg_direct_descendants'][name]))
-
-        for name in pkg_data[rpm_type]['pkg_transitive_requires']:
-            print("%s needs %s" % (name, pkg_data[rpm_type]['pkg_transitive_requires'][name]))
-            print("")
-     
-        for name in pkg_data[rpm_type]['pkg_transitive_descendants']:
-            print("%s informs %s" % (name, pkg_data[rpm_type]['pkg_transitive_descendants'][name]))
-            print("")
-
-
-if os.path.isdir(publish_cache_dir):
-   create_cache(publish_cache_dir)
-else:
-   print("ERROR: Directory not found '%s" % publish_cache_dir)
diff --git a/build-tools/default_build_srpm b/build-tools/default_build_srpm
deleted file mode 100755
index fe99ad6c..00000000
--- a/build-tools/default_build_srpm
+++ /dev/null
@@ -1,277 +0,0 @@
-#!/bin/bash
-# set -x
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-source "$SRC_BASE/build-tools/spec-utils"
-source "$SRC_BASE/build-tools/srpm-utils"
-
-CUR_DIR=`pwd`
-BUILD_DIR="$RPMBUILD_BASE"
-
-if [ "x$DATA" == "x" ]; then
-   echo "ERROR: default_build_srpm (${LINENO}): Environment variable 'DATA' not defined."
-   exit 1
-fi
-
-srpm_source_build_data "$DATA" "$SRC_BUILD_TYPE" "$SRPM_OR_SPEC_PATH"
-if [ $? -ne 0 ]; then
-    echo "ERROR: default_build_srpm (${LINENO}): Failed to source build data from $DATA"
-    exit 1
-fi
-
-if [ "x$PBR_VERSION" != "x" ] && [ "x$PBR_VERSION" != "xNA" ]; then
-    VERSION=$PBR_VERSION
-fi
-
-if [ "x$VERSION" == "x" ]; then
-    for SPEC in `find $SPECS_BASE -name '*.spec' | sort -V`; do
-       SPEC_PATH="$SPEC"
-
-       VERSION_DERIVED=`spec_evaluate '%{version}' "$SPEC_PATH" 2>> /dev/null`
-       if [ $? -ne 0 ]; then
-           echo "ERROR: default_build_srpm (${LINENO}): '%{version}' not found in '$PKG_BASE/$SPEC_PATH'"
-           VERSION_DERIVED=""
-       fi
-
-       if [ "x$VERSION_DERIVED" != "x" ]; then
-          if [ "x$VERSION" == "x" ]; then
-             VERSION=$VERSION_DERIVED
-          else
-             if [ "x$SRC_DIR" != "x" ]; then
-                echo "ERROR: default_build_srpm (${LINENO}): multiple spec files found, can't set VERSION automatically"
-                exit 1
-             fi
-          fi
-       fi
-    done
-
-    if [ "x$VERSION" == "x" ]; then
-       if [ -f $SRC_DIR/PKG-INFO ]; then
-          VERSION=$(grep '^Version:' $SRC_DIR/PKG-INFO | awk -F ': ' '{print $2}' | sed -e 's/^[[:space:]]*//')
-       fi
-    fi
-
-    if [ "x$VERSION" != "x" ]; then
-        echo "Derived VERSION=$VERSION"
-    else
-        echo "ERROR: default_build_srpm (${LINENO}): Failed to derive a good VERSION from SPEC file, and none provided."
-        exit 1
-    fi
-fi
-
-if [ "x$TAR_NAME" == "x" ]; then
-    for SPEC in `find $SPECS_BASE -name '*.spec' | sort -V`; do
-       SPEC_PATH="$SPEC"
-
-       SERVICE=`spec_find_global service "$SPEC_PATH" 2>> /dev/null`
-       if [ $? -eq 0 ]; then
-          if [ "x$TAR_NAME" == "x" ]; then
-             TAR_NAME=$SERVICE
-          else
-             if [ "x$SRC_DIR" != "x" ]; then
-                echo "ERROR: default_build_srpm (${LINENO}): multiple spec files found, can't set TAR_NAME automatically"
-                exit 1
-             fi
-          fi
-       else
-          NAME=`spec_find_tag Name "$SPEC_PATH" 2>> /dev/null`
-          if [ $? -eq 0 ]; then
-             if [ "x$TAR_NAME" == "x" ]; then
-                TAR_NAME=$NAME
-             else
-                if [ "x$SRC_DIR" != "x" ]; then
-                   echo "ERROR: default_build_srpm (${LINENO}): multiple spec files found, can't set TAR_NAME automatically"
-                   exit 1
-                fi
-             fi
-          else
-             echo "WARNING: default_build_srpm (${LINENO}): 'Name' not found in '$SPEC_PATH'"
-             NAME=""
-          fi
-       fi
-    done
-
-    if [ "x$TAR_NAME" == "x" ]; then
-        if [ -f $SRC_DIR/PKG-INFO ]; then
-            TAR_NAME=$(grep '^Name:' $SRC_DIR/PKG-INFO | awk -F ': ' '{print $2}' | sed -e 's/^[[:space:]]*//')
-        fi
-    fi
-
-    if [ "x$TAR_NAME" != "x" ]; then
-        echo "Derived TAR_NAME=$TAR_NAME"
-    else
-        echo "ERROR: default_build_srpm (${LINENO}): Failed to derive a good TAR_NAME from SPEC file, and none provided."
-        exit 1
-    fi
-fi
-
-if [ "x$TAR" == "x" ]; then
-    TAR="$TAR_NAME-$VERSION.tar.gz"
-fi
-
-SOURCE_PATH="$BUILD_DIR/SOURCES"
-TAR_PATH="$SOURCE_PATH/$TAR"
-STAGING=""
-
-if [ "x$COPY_LIST_TO_TAR" != "x" ] || [ "x$EXCLUDE_LIST_FROM_TAR" != "x" ]; then
-	STAGING="$BUILD_DIR/staging"
-	mkdir -p $STAGING
-fi
-
-mkdir -p "$BUILD_DIR/SRPMS"
-mkdir -p "$SOURCE_PATH"
-
-if [ "x$SRC_DIR" == "x" -a "x$COPY_LIST" == "x" -a "$ALLOW_EMPTY_RPM" != "true" ]; then
-   echo "ERROR: default_build_srpm (${LINENO}): '$PWD/$DATA' failed to provide at least one of 'SRC_DIR' or 'COPY_LIST'"
-   exit 1
-fi
-
-if [ "x$SRC_DIR" != "x" ]; then
-   if [ ! -d "$SRC_DIR" ]; then
-      echo "ERROR: default_build_srpm (${LINENO}): directory not found: '$SRC_DIR'"
-      exit 1
-   fi
-fi
-
-if [ "x$COPY_LIST" != "x" ]; then
-   echo "COPY_LIST: $COPY_LIST"
-   for p in $COPY_LIST; do
-      # echo "COPY_LIST: $p"
-      \cp -L -u -r -v $p $SOURCE_PATH
-      if [ $? -ne 0 ]; then
-         echo "ERROR: default_build_srpm (${LINENO}): COPY_LIST: file not found: '$p'"
-         exit 1
-      fi
-   done
-fi
-
-if [ "x$STAGING" != "x" ]; then
-   \cp -L -u -r -v $SRC_DIR $STAGING
-   echo "COPY_LIST_TO_TAR: $COPY_LIST_TO_TAR"
-   for p in $COPY_LIST_TO_TAR; do
-      # echo "COPY_LIST_TO_TAR: $p"
-      \cp -L -u -r -v $p $STAGING/$SRC_DIR
-      if [ $? -ne 0 ]; then
-         echo "ERROR: default_build_srpm (${LINENO}): COPY_LIST_TO_TAR: file not found: '$p'"
-         exit 1
-      fi
-   done   
-   echo "EXCLUDE_LIST_FROM_TAR: $EXCLUDE_LIST_FROM_TAR"
-   for p in $EXCLUDE_LIST_FROM_TAR; do
-      # echo "EXCLUDE_LIST_FROM_TAR: $p"
-      echo "rm -rf $STAGING/$SRC_DIR/$p"
-      \rm -rf $STAGING/$SRC_DIR/$p
-      if [ $? -ne 0 ]; then
-         echo "ERROR: default_build_srpm (${LINENO}): EXCLUDE_LIST_FROM_TAR: could not remove file: '$p'"
-         exit 1
-      fi
-   done   
-   
-fi
-
-TRANSFORM=`echo "$SRC_DIR" | sed 's/^\./\\./' | sed 's:^/::' | sed 's#^.*/\.\./##'`
-
-if [ "x$STAGING" != "x" ]; then
-	pushd $STAGING
-fi
-
-TAR_NEEDED=0
-if [ "x$SRC_DIR" != "x" ]; then
-    echo "SRC_DIR=$SRC_DIR"
-    if [ -f $TAR_PATH ]; then
-        n=`find . -cnewer $TAR_PATH -and ! -path './.git*' \
-                                    -and ! -path './build/*' \
-                                    -and ! -path './.pc/*' \
-                                    -and ! -path './patches/*' \
-                                    -and ! -path "./$DISTRO/*" \
-                                    -and ! -path './pbr-*.egg/*' \
-                                    | wc -l`
-        if [ $n -gt 0 ]; then
-            TAR_NEEDED=1
-        fi
-    else
-        TAR_NEEDED=1
-    fi
-fi
-
-if [ $TAR_NEEDED -gt 0 ]; then
-    echo "Creating tar file: $TAR_PATH ..."
-    echo "tar --exclude '.git*' --exclude 'build' --exclude='.pc' --exclude='patches' --exclude='$SRC_DIR/$DISTRO' --exclude='pbr-*.egg' --transform 's,^$TRANSFORM,$TAR_NAME-$VERSION,' -czf $TAR_PATH $SRC_DIR"
-    tar --exclude '.git*' --exclude 'build' --exclude='.pc' --exclude='patches' --exclude="$SRC_DIR/$DISTRO" --exclude='pbr-*.egg' --transform "s,^$TRANSFORM,$TAR_NAME-$VERSION," -czf "$TAR_PATH" "$SRC_DIR"
-    if [ $? -ne 0 ]; then
-		if [ "x$STAGING" != "x" ]; then
-			popd
-		fi
-    
-        echo "ERROR: default_build_srpm (${LINENO}): failed to create tar file, cmd: tar --exclude '.git*' --exclude 'build' --exclude='.pc' --exclude='patches' --exclude="$SRC_DIR/$DISTRO" --exclude='pbr-*.egg' --transform \"s,^$TRANSFORM,$TAR_NAME-$VERSION,\" -czf '$TAR_PATH' '$SRC_DIR'"
-        exit 1
-    fi
-    echo "Created tar file: $TAR_PATH"
-else
-    echo "Tar file not needed."
-fi
-
-if [ "x$STAGING" != "x" ]; then
-	popd
-fi
-
-if [ ! -d $BUILD_DIR/SPECS ]; then
-    echo "Spec directory '$BUILD_DIR/SPECS' does not exist"
-    exit 1
-fi
-
-if [ $(ls -1 $BUILD_DIR/SPECS/*.spec | wc -l) -eq 0 ]; then
-    echo "No spec files found in spec directory '$BUILD_DIR/SPECS'"
-    exit 1
-fi
-
-for SPEC in `ls -1 $BUILD_DIR/SPECS`; do
-    SPEC_PATH="$BUILD_DIR/SPECS/$SPEC"
-    RELEASE=`spec_find_tag Release "$SPEC_PATH" 2>> /dev/null`
-    if [ $? -ne 0 ]; then
-        echo "ERROR: default_build_srpm (${LINENO}): 'Release' not found in '$SPEC_PATH'"
-    fi
-    NAME=`spec_find_tag Name "$SPEC_PATH" 2>> /dev/null`
-    if [ $? -ne 0 ]; then
-        echo "ERROR: default_build_srpm (${LINENO}): 'Name' not found in '$SPEC_PATH'"
-    fi
-    SRPM="$NAME-$VERSION-$RELEASE.src.rpm"
-    SRPM_PATH="$BUILD_DIR/SRPMS/$SRPM"
-
-    spec_validate_tis_release $SPEC_PATH
-    if [ $? -ne 0 ]; then
-        echo "TIS Validation of $SPEC_PATH failed"
-        exit 1
-    fi
-
-    BUILD_NEEDED=0
-    if [ -f $SRPM_PATH ]; then
-        n=`find . -cnewer $SRPM_PATH | wc -l`
-        if [ $n -gt 0 ]; then
-            BUILD_NEEDED=1
-        fi
-    else
-        BUILD_NEEDED=1
-    fi
-
-    if [ $BUILD_NEEDED -gt 0 ]; then
-        echo "SPEC file: $SPEC_PATH"
-        echo "SRPM build directory: $BUILD_DIR"
-        echo "TIS_PATCH_VER: $TIS_PATCH_VER"
-        echo "PBR_VERSION: $PBR_VERSION"
-
-        sed -i -e "1 i%define _tis_build_type $BUILD_TYPE" $SPEC_PATH
-        sed -i -e "1 i%define tis_patch_ver $TIS_PATCH_VER" $SPEC_PATH
-        sed -i -e "1 i%define pbr_version $PBR_VERSION" $SPEC_PATH
-        rpmbuild -bs $SPEC_PATH --define="%_topdir $BUILD_DIR"  --undefine=dist --define="_tis_dist .tis"
-    else
-        echo "SRPM build not needed"
-    fi
-done
-
-
diff --git a/build-tools/find_klm b/build-tools/find_klm
deleted file mode 100755
index f1604994..00000000
--- a/build-tools/find_klm
+++ /dev/null
@@ -1,59 +0,0 @@
-#!/bin/bash
-
-for r in $(find $MY_WORKSPACE/*/rpmbuild/RPMS -name '*.rpm'); do
-   f=$(basename $r)
-   find  $MY_WORKSPACE/export/dist/isolinux/Packages | grep $f >> /dev/null
-   if [ $? -ne 0 ]; then
-         continue
-   fi
-   n=$(rpm -q --qf='%{NAME}\n' -p $r)
-   d=$(dirname $r)
-# echo "f=$f"
-   for f in $(rpm -q -p -l $r | grep '[.]ko$' | head -n 1); do
-      FOUND=0 
-      s=$(rpm -q --info -p $r | grep 'Source RPM  :' | awk -F: '{print $2}' | tr -d '[[:space:]]')
-      NAME=$(rpm -q --qf='%{NAME}\n' -p $d/$s)
-# echo "NAME=$NAME"
-      for s2 in $(find $MY_WORKSPACE/*/rpmbuild/SRPMS -name "$NAME-[0-9]*.src.rpm"); do
-         NAME2=$(rpm -q --qf='%{NAME}\n' -p $s2)
-# echo "NAME2=$NAME2"
-         if [ "${NAME}" == "${NAME2}" ]; then
-            echo $NAME | grep '[-]rt' >> /dev/null
-            if [ $? -ne 0 ]; then
-               echo $NAME
-               FOUND=1
-               break
-            fi
-            # SIMPLE_NAME=$(echo $NAME | sed 's#-kmod##' | sed 's#-kernel##' | sed 's#^kernel$#linux#'   | sed 's#^kernel-rt$#linux-rt#')
-            SIMPLE_NAME=$(echo $NAME | sed 's#^kernel$#linux#'   | sed 's#^kernel-rt$#linux-rt#')
-# echo "SIMPLE_NAME=$SIMPLE_NAME"
-            grep "[/]$SIMPLE_NAME$" $(for g in $(find $MY_REPO -type d -name .git); do d=$(dirname $g); find $d -name 'centos_pkg_dirs*'; done) >> /dev/null
-            if [ $? -eq 0 ]; then
-               echo $NAME
-               FOUND=1
-               break
-            fi
-            SIMPLE_NAME=$(echo $NAME | sed 's#-rt$##' )
-# echo "SIMPLE_NAME=$SIMPLE_NAME"
-            grep "[/]$SIMPLE_NAME$" $(for g in $(find $MY_REPO -type d -name .git); do d=$(dirname $g); find $d -name 'centos_pkg_dirs*'; done) >> /dev/null
-            if [ $? -eq 0 ]; then
-               echo $SIMPLE_NAME
-               FOUND=1
-               break
-            fi
-            SIMPLE_NAME2=$(echo $SIMPLE_NAME | sed 's#-kmod##' )
-# echo "SIMPLE_NAME2=$SIMPLE_NAME2"
-            grep "[/-]$SIMPLE_NAME2$" $(for g in $(find $MY_REPO -type d -name .git); do d=$(dirname $g); find $d -name 'centos_pkg_dirs*'; done) >> /dev/null
-            if [ $? -eq 0 ]; then
-               echo $SIMPLE_NAME
-               FOUND=1
-               break
-            fi
-         fi
-       done
-       if [ $FOUND -eq 1 ]; then
-          break
-       fi
-   done
-# done
-done | sort --unique
diff --git a/build-tools/find_patched_srpms_needing_upgrade b/build-tools/find_patched_srpms_needing_upgrade
deleted file mode 100755
index a57dc66a..00000000
--- a/build-tools/find_patched_srpms_needing_upgrade
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-for f in `find $MY_REPO -name srpm_path`; do
-   orig_line=`cat $f`
-   first=`echo $orig_line | awk -F : '{print $1}'`
-   orig_path="/import/mirrors/$orig_line"
-   if [ "$first" == "mirror" ]; then
-      orig_path="/import/mirrors/"$(echo $orig_line | awk -F : '{print $2}');
-   fi
-   if [ "$first" == "repo" ]; then
-      orig_path="$MY_REPO/"$(echo $orig_line | awk -F : '{print $2}')
-      continue
-   fi
-
-   if [ ! -f $orig_path ]; then
-      echo "ERROR: bad srpm path: '$orig_path' derived from '$f'"
-      exit 1
-   fi
-
-   orig_dir=$(dirname $orig_path)
-   repodata_dir=$orig_dir/repodata
-   if [ ! -d $repodata_dir ]; then
-      repodata_dir=$orig_dir/../repodata
-      if [ ! -d $repodata_dir ]; then
-         repodata_dir=$orig_dir/../../repodata
-         if [ ! -d $repodata_dir ]; then
-            echo "ERROR: couldn't find repodata for '$orig_path'"
-            exit 1
-         fi
-      fi
-   fi
-
-   # echo "'$orig_path' -> '$repodata_dir'"
-   name=$(rpm -q --queryformat '%{NAME}\n' -p $orig_path 2>> /dev/null)
-   version=$(rpm -q --queryformat '%{VERSION}\n' -p $orig_path 2>> /dev/null)
-   release=$(rpm -q --queryformat '%{RELEASE}\n' -p $orig_path 2>> /dev/null)
-   orig_name=$(basename $orig_path)
-   best_name="$orig_name"
-   for n in `find $orig_dir -name $name-*`; do
-      if [ "$n" != "$orig_path" ]; then
-         new_name=$(rpm -q --queryformat '%{NAME}\n' -p $n)
-         if [ "$name" == "$new_name" ]; then
-            rpmdev-vercmp $(basename $n) $best_name >> /dev/null
-            if [ $? -eq 11 ]; then
-               best_name=$(basename $n)
-            fi
-         fi
-      fi
-   done
-   if [ "$best_name" != "$orig_name" ]; then
-      echo "$f: $orig_name ==> $best_name"
-   fi
-done
-
diff --git a/build-tools/helm_chart_modify.py b/build-tools/helm_chart_modify.py
index bd8d6720..2bb01e49 100755
--- a/build-tools/helm_chart_modify.py
+++ b/build-tools/helm_chart_modify.py
@@ -39,14 +39,14 @@
 #     list-of-image-record-files: one or more files containing image records
 #
 #     e.g.
-#     cat $MY_WORKSPACE/std/build-images/images-centos-stable-versioned.lst
-#     docker.io/starlingx/stx-keystone-api-proxy:master-centos-stable-20200811T002300Z.0
-#     docker.io/starlingx/stx-nova-client:master-centos-stable-20200811T002300Z.0
+#     cat $MY_WORKSPACE/std/build-images/images-debian-stable-versioned.lst
+#     docker.io/starlingx/stx-keystone-api-proxy:master-debian-stable-20200811T002300Z.0
+#     docker.io/starlingx/stx-nova-client:master-debian-stable-20200811T002300Z.0
 #     ...
 #
 # Sample usage:
 #    helm_chart_modify.py <input-yaml-file> <output-yaml-file> \
-#         $MY_WORKSPACE/std/build-images/images-centos-stable-versioned.lst
+#         $MY_WORKSPACE/std/build-images/images-debian-stable-versioned.lst
 
 import collections
 import sys
diff --git a/build-tools/image-utils.sh b/build-tools/image-utils.sh
index cda4802c..3d71736d 100755
--- a/build-tools/image-utils.sh
+++ b/build-tools/image-utils.sh
@@ -50,7 +50,7 @@ get_bsp_dir () {
 # Parameters:
 #    build_target: One of 'iso', 'guest' ...
 #    list_type:    One of 'std', 'dev', 'layer'
-#    distro:       One of 'centos', ...
+#    distro:       One of 'debian', ...
 #    layer:        One of 'compiler', 'distro', 'flock', ...
 #                  Only required if list_type == layer
 #
@@ -68,7 +68,7 @@ image_inc_list () {
     if [ "${list_type}" = "layer" ]; then
         local required_layer_cfg_name="required_layer_${build_target}_inc.cfg"
         local layer_cfg_name="${distro}_build_layer.cfg"
-        local root_dir="${MY_REPO}/../stx-tools/centos-mirror-tools/config/${distro}/${layer}"
+        local root_dir="${MY_REPO}/../stx-tools/${distro}-mirror-tools/config/${distro}/${layer}"
         local layer_cfgs=""
 
         layer_cfgs=$(find $(for x in $GIT_LIST; do echo $x/; done) -maxdepth 1 -name ${layer_cfg_name})
diff --git a/build-tools/ip_report.py b/build-tools/ip_report.py
deleted file mode 100755
index ec5de2e0..00000000
--- a/build-tools/ip_report.py
+++ /dev/null
@@ -1,523 +0,0 @@
-#!/usr/bin/python
-
-import csv
-import os
-import rpm
-import shutil
-import subprocess
-import sys
-import getopt
-
-
-class BinPackage(object):
-    def __init__(self, path, ts):
-        fdno = os.open(path, os.O_RDONLY)
-        hdr = ts.hdrFromFdno(path)
-        os.close(fdno)
-
-        self.source = hdr[rpm.RPMTAG_SOURCERPM]
-        self.desc = hdr[rpm.RPMTAG_DESCRIPTION].replace('\n', ' ')
-        self.dirname = os.path.dirname(path)
-        self.filename = os.path.basename(path)
-        self.path = path
-        self.kernel_module = False
-        self.name = hdr[rpm.RPMTAG_NAME]
-
-        # Does the package contain kernel modules?
-        for filename in hdr[rpm.RPMTAG_BASENAMES]:
-            assert isinstance(filename, basestring)
-            if filename.endswith('.ko'):
-                self.kernel_module = True
-                break
-
-
-class SrcPackage(object):
-    def __init__(self, path=None):
-        self.bin_pkg = None
-        self.original_src = None
-        self.sha = 'SHA'
-        if path is None:
-            self.filename = None
-            self.path = None
-        else:
-            self.filename = os.path.basename(path)
-            self.path = path
-            ts = rpm.TransactionSet()
-            ts.setVSFlags(rpm._RPMVSF_NODIGESTS | rpm._RPMVSF_NOSIGNATURES)
-            fdno = os.open(self.path, os.O_RDONLY)
-            hdr = ts.hdrFromFdno(self.path)
-            os.close(fdno)
-            self.desc = hdr[rpm.RPMTAG_DESCRIPTION].replace('\n', ' ')
-            self.version = hdr[rpm.RPMTAG_VERSION] + '-' + hdr[rpm.RPMTAG_RELEASE]
-            self.licences = hdr[rpm.RPMTAG_LICENSE]
-            self.name = hdr[rpm.RPMTAG_NAME]
-            self.url = hdr[rpm.RPMTAG_URL]
-
-        self.modified = None
-        self.kernel_module = False
-        self.disclosed_by = 'Jason McKenna'
-        self.shipped_as = 'Binary'
-        self.origin = 'Unknown'
-        self.notes = ''
-        self.wrs = False
-
-    def __lt__(self, other):
-        me = self.name.lower()
-        them = other.name.lower()
-        if me == them:
-            return self.name < other.name
-        else:
-            return me < them
-
-
-class IPReport(object):
-    __KNOWN_PATHS = [
-        # CentOS 7.4
-        ['/import/mirrors/CentOS/7.4.1708/os/Source/SPackages',
-         'http://vault.centos.org/7.4.1708/os/Source/SPackages'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.4.1708/updates/Source/SPackages',
-         'http://vault.centos.org/7.4.1708/updates/Source/SPackages'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.4.1708/cloud/Source/openstack-newton/common',
-         'http://vault.centos.org/7.4.1708/cloud/Source/openstack-newton/common'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.4.1708/cloud/Source/openstack-newton',
-         'http://vault.centos.org/7.4.1708/cloud/Source/openstack-newton'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.4.1708/cloud/Source/openstack-mitaka/common',
-         'http://vault.centos.org/7.4.1708/cloud/Source/openstack-mitaka/common'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.4.1708/cloud/Source/openstack-mitaka',
-         'http://vault.centos.org/7.4.1708/cloud/Source/openstack-mitaka'],
-        ['/import/mirrors/CentOS/7.4.1708/extras/Source/SPackages',
-         'http://vault.centos.org/7.4.1708/extras/Source/SPackages'],
-        # CentOS 7.3
-        ['/import/mirrors/CentOS/7.3.1611/os/Source/SPackages',
-         'http://vault.centos.org/7.3.1611/os/Source/SPackages'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.3.1611/updates/Source/SPackages',
-         'http://vault.centos.org/7.3.1611/updates/Source/SPackages'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.3.1611/cloud/Source/openstack-newton/common',
-         'http://vault.centos.org/7.3.1611/cloud/Source/openstack-newton/common'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.3.1611/cloud/Source/openstack-newton',
-         'http://vault.centos.org/7.3.1611/cloud/Source/openstack-newton'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.3.1611/cloud/Source/openstack-mitaka/common',
-         'http://vault.centos.org/7.3.1611/cloud/Source/openstack-mitaka/common'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.3.1611/cloud/Source/openstack-mitaka',
-         'http://vault.centos.org/7.3.1611/cloud/Source/openstack-mitaka'],
-        ['/import/mirrors/CentOS/7.3.1611/extras/Source/SPackages',
-         'http://vault.centos.org/7.3.1611/extras/Source/SPackages'],
-        # CentOS 7.2
-        ['/import/mirrors/CentOS/7.2.1511/os/Source/SPackages', 'http://vault.centos.org/7.2.1511/os/Source/SPackages'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.2.1511/updates/Source/SPackages',
-         'http://vault.centos.org/7.2.1511/updates/Source/SPackages'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.2.1511/cloud/Source/openstack-mitaka/common',
-         'http://vault.centos.org/7.2.1511/cloud/Source/openstack-mitaka/common'],
-        ['/import/mirrors/CentOS/vault.centos.org/7.2.1511/cloud/Source/openstack-mitaka',
-         'http://vault.centos.org/7.2.1511/cloud/Source/openstack-mitaka'],
-        ['/import/mirrors/CentOS/7.2.1511/extras/Source/SPackages',
-         'http://vault.centos.org/7.2.1511/extras/Source/SPackages'],
-        ['/import/mirrors/CentOS/tis-r4-CentOS/newton/Source', 'Unknown'],
-        ['/import/mirrors/CentOS/tis-r4-CentOS/tis-r4-3rd-Party', 'Unknown']
-
-        ]
-
-    def __init__(self, workspace=None, repo=None):
-        self.workspace = None
-        self.repo = None
-        self.shipped_binaries = list()
-        self.built_binaries = list()
-        self.check_env()
-        if workspace is not None:
-            self.workspace = workspace
-        if repo is not None:
-            self.repo = repo
-
-        # Generate a list of binaries that we shipped
-        for filename in os.listdir(self.workspace + '/export/dist/isolinux/Packages'):
-            if filename.endswith('rpm'):
-                self.shipped_binaries.append(filename)
-
-        # Generate a list of binaries that we built ourselves
-        for build in ['rt', 'std']:
-            for filename in os.listdir(self.workspace + '/' + build + '/rpmbuild/RPMS/'):
-                if filename.endswith('rpm'):
-                    self.built_binaries.append(filename)
-
-        print('Looking up packages for which we have source...')
-        self.original_src_pkgs = dict()
-        self.build_original_src_pkgs()
-        print('Looking up packages we built...')
-        self.built_src_pkgs = dict()
-        self.build_built_src_pkgs()
-        print('Looking up packages we built...')
-        self.hardcoded_lookup_dict = dict()
-        self.build_hardcoded_lookup_dict()
-
-    def build_hardcoded_lookup_dict(self):
-        with open(self.repo + '/build-tools/source_lookup.txt', 'r') as lookup_file:
-            for line in lookup_file:
-                line = line.rstrip()
-                words = line.split()
-                if (words is not None) and (len(words) >= 2):
-                    self.hardcoded_lookup_dict[words[1]] = (words[0], False)
-
-        with open(self.repo + '/build-tools/wrs_orig.txt', 'r') as lookup_file:
-            for line in lookup_file:
-                line = line.rstrip()
-                words = line.split()
-                if (words is not None) and (len(words) >= 1):
-                    self.hardcoded_lookup_dict[words[0]] = ('No download', True)
-
-    @staticmethod
-    def path_to_origin(filepath):
-        for path in IPReport.__KNOWN_PATHS:
-            if filepath.startswith(path[0]) and (not path[1].lower().startswith('unknown')):
-                return path[1] + '/' + os.path.basename(filepath)
-        return 'Unknown'
-
-    def hardcoded_lookup(self, package_name):
-        if package_name in self.hardcoded_lookup_dict.keys():
-            return self.hardcoded_lookup_dict[package_name]
-        return None, False
-
-    def check_env(self):
-        if 'MY_WORKSPACE' in os.environ:
-            self.workspace = os.environ['MY_WORKSPACE']
-        else:
-            print('Could not find $MY_WORKSPACE')
-            raise IOError('Could not fine $MY_WORKSPACE')
-
-        if 'MY_REPO' in os.environ:
-            self.repo = os.environ['MY_REPO']
-        else:
-            print('Could not find $MY_REPO')
-            raise IOError('Could not fine $MY_REPO')
-
-    def do_bin_pkgs(self):
-        print('Gathering binary package information')
-        self.read_bin_pkgs()
-
-    def read_bin_pkgs(self):
-        self.bin_pkgs = list()
-        ts = rpm.TransactionSet()
-        ts.setVSFlags(rpm._RPMVSF_NODIGESTS | rpm._RPMVSF_NOSIGNATURES)
-        for filename in self.shipped_binaries:
-            if filename.endswith('rpm'):
-                bin_pkg = BinPackage(self.workspace + '/export/dist/isolinux/Packages/' + filename, ts)
-                self.bin_pkgs.append(bin_pkg)
-
-    def do_src_report(self, copy_packages=False, do_wrs=True, delta_file=None, output_path=None, strip_unchanged=False):
-        self.bin_to_src()
-        self.src_pkgs.sort()
-
-        if delta_file is not None:
-            self.delta(delta_file)
-
-        if output_path is None:
-            output_path = self.workspace + '/export/ip_report'
-
-        # Create output dir (if required)
-        if not os.path.exists(output_path):
-            os.makedirs(output_path)
-
-        # Create paths for RPMs (if required)
-        if copy_packages:
-            if not os.path.exists(output_path + '/non_wrs'):
-                shutil.rmtree(output_path + '/non_wrs', True)
-                os.makedirs(output_path + '/non_wrs')
-            if do_wrs:
-                shutil.rmtree(output_path + '/wrs', True)
-                os.makedirs(output_path + '/wrs')
-
-        with open(output_path + '/srcreport.csv', 'wb') as src_report_file:
-            src_report_writer = csv.writer(src_report_file)
-
-            # Write header row
-            src_report_writer.writerow(
-                ['Package File', 'File Name', 'Package Name', 'Version', 'SHA1', 'Disclosed By',
-                 'Description', 'Part Of (Runtime, Host, Both)', 'Modified (Yes, No)', 'Hardware Interfacing (Yes, No)',
-                 'License(s) Found', 'Package Download URL', 'Kernel module', 'Notes'])
-
-            for src_pkg in self.src_pkgs:
-                if src_pkg.modified:
-                    modified_string = 'Yes'
-                else:
-                    modified_string = 'No'
-                if src_pkg.kernel_module:
-                    kmod_string = 'Yes'
-                else:
-                    kmod_string = 'No'
-
-                # Copy the pacakge and get the SHA
-                if copy_packages:
-                    if src_pkg.wrs is False:
-                        shutil.copyfile(src_pkg.path, output_path + '/non_wrs/' + src_pkg.filename)
-                        shasumout = subprocess.check_output(
-                            ['shasum', output_path + '/non_wrs/' + src_pkg.filename]).split()[0]
-                        src_pkg.sha = shasumout
-                        if strip_unchanged and (src_pkg.notes.lower().startswith('unchanged')):
-                            os.remove(output_path + '/non_wrs/' + src_pkg.filename)
-                    else:
-                        if do_wrs:
-                            shutil.copyfile(src_pkg.path, output_path + '/wrs/' + src_pkg.filename)
-                            shasumout = subprocess.check_output(
-                                ['shasum', output_path + '/wrs/' + src_pkg.filename]).split()[0]
-                            src_pkg.sha = shasumout
-                            if strip_unchanged and (src_pkg.notes.lower().startswith('unchanged')):
-                                os.remove(output_path + '/wrs/' + src_pkg.filename)
-
-                if do_wrs or (src_pkg.wrs is False):
-                    src_report_writer.writerow(
-                        [src_pkg.filename, src_pkg.name, src_pkg.version, src_pkg.sha, src_pkg.disclosed_by,
-                         src_pkg.desc, 'Runtime', src_pkg.shipped_as, modified_string, 'No', src_pkg.licences,
-                         src_pkg.origin, kmod_string, src_pkg.notes])
-                    if 'unknown' in src_pkg.origin.lower():
-                        print(
-                        'Warning: Could not determine origin of ' + src_pkg.name + '.  Please investigate/populate manually')
-
-    def bin_to_src(self):
-        self.src_pkgs = list()
-        src_pkg_names = list()
-        for bin_pkg in self.bin_pkgs:
-            if src_pkg_names.__contains__(bin_pkg.source):
-                if bin_pkg.kernel_module:
-                    for src_pkg in self.src_pkgs:
-                        if src_pkg.filename == bin_pkg.source:
-                            src_pkg.kernel_module = True
-                            break
-
-                continue
-
-            # if we reach here, then the source package is not yet in our db.
-            # we first search for the source package in the built-rpms
-            if 'shim-signed' in bin_pkg.source:
-                for tmp in self.built_src_pkgs:
-                    if 'shim-signed' in tmp:
-                        print('shim-signed hack -- ' + bin_pkg.source + ' to ' + tmp)
-                        bin_pkg.source = tmp
-                        break
-            if 'shim-unsigned' in bin_pkg.source:
-                for tmp in self.built_src_pkgs:
-                    if 'shim-0' in tmp:
-                        print('shim-unsigned hack -- ' + bin_pkg.source + ' to ' + tmp)
-                        bin_pkg.source = tmp
-                        break
-            if 'grub2-efi-pxeboot' in bin_pkg.source:
-                for tmp in self.built_src_pkgs:
-                    if 'grub2-2' in tmp:
-                        print('grub2-efi-pxeboot hack -- ' + bin_pkg.source + ' to ' + tmp)
-                        bin_pkg.source = tmp
-                        break
-
-            if bin_pkg.source in self.built_src_pkgs:
-                src_pkg = self.built_src_pkgs[bin_pkg.source]
-                src_pkg.modified = True
-
-                # First guess, we see if there's an original source with the source package name
-                # (this is 99% of the cases)
-                src_pkg_orig_name = src_pkg.name
-                if src_pkg_orig_name in self.original_src_pkgs:
-                    src_pkg.original_src = self.original_src_pkgs[src_pkg_orig_name]
-                    src_pkg.origin = src_pkg.original_src.origin
-
-            else:
-                src_pkg_path = self.locate_in_mirror(bin_pkg.source)
-                if not os.path.isabs(src_pkg_path):
-                    continue
-                src_pkg = SrcPackage(src_pkg_path)
-                src_pkg.origin = IPReport.path_to_origin(src_pkg_path)
-                src_pkg.modified = False
-
-            if bin_pkg.kernel_module:
-                src_pkg.kernel_module = True
-
-            src_pkg_names.append(bin_pkg.source)
-            self.src_pkgs.append(src_pkg)
-
-            if src_pkg.origin.lower() == 'unknown':
-                if 'windriver' in src_pkg.licences.lower():
-                    src_pkg.origin = 'No download'
-                else:
-                    if src_pkg.url is not None:
-                        src_pkg.origin = src_pkg.url
-
-            if 'unknown' in src_pkg.origin.lower():
-                (orig, is_wrs) = self.hardcoded_lookup(src_pkg.name)
-                if orig is not None:
-                    src_pkg.origin = orig
-                    src_pkg.wrs = is_wrs
-
-            if (src_pkg.origin.lower() == 'no download') and ('windriver' in src_pkg.licences.lower()):
-                src_pkg.wrs = True
-
-    def locate_in_mirror(self, filename):
-        """ takes an RPM filename and finds the full path of the file """
-
-        fullpath = None
-
-        # Old or new location of centos repo?
-        if os.path.isdir(self.repo + '/centos-repo/'):
-            filename = filename.replace('mirror:', self.repo + '/centos-repo/')
-        elif os.path.isdir(self.repo + '/cgts-centos-repo/'):
-            filename = filename.replace('mirror:', self.repo + '/cgcs-centos-repo/')
-        else:
-            filename = filename.replace('mirror:', self.repo + '/centos-repo/')
-
-        filename = filename.replace('repo:', self.repo + '/')
-
-        # At this point, filename could be a complete path (incl symlink), or just a filename
-        best_guess = filename
-        filename = os.path.basename(filename)
-
-        for path in IPReport.__KNOWN_PATHS:
-            if os.path.exists(path[0] + '/' + filename):
-                fullpath = path[0] + '/' + filename
-                break
-
-        if fullpath is not None:
-            return fullpath
-        else:
-            return best_guess
-
-    def build_original_src_pkgs(self):
-        for root, dirs, files in os.walk(self.repo):
-            for name in files:
-                if name == 'srpm_path':
-                    with open(os.path.join(root, 'srpm_path'), 'r') as srpm_path_file:
-                        original_srpm_file = srpm_path_file.readline().rstrip()
-                        original_src_pkg_path = self.locate_in_mirror(original_srpm_file)
-                        original_src_pkg = SrcPackage(original_src_pkg_path)
-                        original_src_pkg.origin = IPReport.path_to_origin(original_src_pkg_path)
-                        self.original_src_pkgs[original_src_pkg.name] = original_src_pkg
-
-    def build_built_src_pkgs(self):
-        """ Create a dict of any source package that we built ourselves """
-        for build in ['std', 'rt']:
-            for root, dirs, files in os.walk(self.workspace + '/' + build + '/rpmbuild/SRPMS'):
-                for name in files:
-                    if name.endswith('.src.rpm'):
-                        built_src_pkg = SrcPackage(os.path.join(root, name))
-                        self.built_src_pkgs[built_src_pkg.filename] = built_src_pkg
-
-    def delta(self, orig_report):
-        if orig_report is None:
-            return
-        delta_src_pkgs = self.read_last_report(orig_report)
-
-        for pkg in self.src_pkgs:
-            if pkg.name in delta_src_pkgs:
-                old_pkg = delta_src_pkgs[pkg.name]
-                if old_pkg.version == pkg.version:
-                    pkg.notes = 'Unchanged'
-                else:
-                    pkg.notes = 'New version'
-            else:
-                pkg.notes = 'New package'
-
-    def read_last_report(self, orig_report):
-        orig_pkg_dict = dict()
-        with open(orig_report, 'rb') as orig_report_file:
-            orig_report_reader = csv.reader(orig_report_file)
-            doneHeader = False
-            for row in orig_report_reader:
-                if (not doneHeader) and ('package file name' in row[0].lower()):
-                    doneHeader = True
-                    continue
-                doneHeader = True
-                orig_pkg = SrcPackage()
-                orig_pkg.filename = row[0]
-                orig_pkg.name = row[1]
-                orig_pkg.version = row[2]
-                # sha = row[3]
-                orig_pkg.disclosed_by = row[4]
-                orig_pkg.desc = row[5]
-                # runtime = row[6]
-                orig_pkg.shipped_as = row[7]
-                if row[8].lower is 'yes':
-                    orig_pkg.modified = True
-                else:
-                    orig_pkg.modifed = False
-                # hardware interfacing = row[9]
-                orig_pkg.licences = row[10]
-                orig_pkg.origin = row[11]
-                if row[12].lower is 'yes':
-                    orig_pkg.kernel_module = True
-                else:
-                    orig_pkg.kernel_module = False
-                orig_pkg_dict[orig_pkg.name] = orig_pkg
-
-        return orig_pkg_dict
-
-
-def main(argv):
-    # handle command line arguments
-    # -h/--help       -- help
-    # -n/--no-copy    -- do not copy files (saves time)
-    # -d/--delta=     -- compare with an ealier report
-    # -o/--output=    -- output report/binaries to specified path
-    # -w/--workspace= -- use specified workspace instead of $WORKSPACE
-    # -r/--repo=      -- use sepeciied repo instead of $MY_REPO
-    # -s              -- strip (remove) unchanged packages from copy out directory
-
-    try:
-        opts, args = getopt.getopt(argv, "hnd:o:w:r:s",
-                                   ["delta=", "help", "no-copy", "workspace=", "repo=", "output=", "--strip"])
-    except getopt.GetoptError:
-        # todo - output help
-        sys.exit(2)
-    delta_file = None
-    do_copy = True
-    workspace = None
-    repo = None
-    output_path = None
-    strip_unchanged = False
-
-    for opt, arg in opts:
-        if opt in ('-h', '--help'):
-            print('usage:')
-            print(' ip_report.py [options]')
-            print(' Creates and IP report in $MY_WORKSPACE/export/ip_report ')
-            print(' Source RPMs (both Wind River and non WR) are placed in subdirs within that path')
-            print('')
-            print('Options:')
-            print('  -h/--help                - this help')
-            print('  -d <file>/--delta=<file> - create "notes" field, comparing report with a previous report')
-            print('  -n/--no-copy             - do not copy files into subdirs (this is faster, but means you')
-            print('                             don\'t get SHA sums for files)')
-            print('  -w <path>/--workspace=<path> - use the specified path as workspace, instead of $MY_WORKSPACE')
-            print('  -r <path>/--repo=<path>  - use the specified path as repo, instead of $MY_REPO')
-            print('  -o <path>/--output=<path> - output to specified path (instead of $MY_WORKSPACE/export/ip_report)')
-            print('  -s/--strip               - strip (remove) unchanged files if copied')
-            exit()
-        elif opt in ('-d', '--delta'):
-            delta_file = os.path.normpath(arg)
-            delta_file = os.path.expanduser(delta_file)
-            if not os.path.exists(delta_file):
-                print('Cannot locate ' + delta_file)
-                exit(1)
-        elif opt in ('-w', '--workspace'):
-            workspace = os.path.normpath(arg)
-            workspace = os.path.expanduser(workspace)
-        elif opt in ('-r', '--repo'):
-            repo = os.path.normpath(arg)
-            repo = os.path.expanduser(repo)
-        elif opt in ('-o', '--output'):
-            output_path = os.path.normpath(arg)
-            output_path = os.path.expanduser(output_path)
-        elif opt in ('-n', '--no-copy'):
-            do_copy = False
-        elif opt in ('-s', '--strip-unchanged'):
-            strip_unchanged = True
-
-    print('Doing IP report')
-    if delta_file is not None:
-        print('Delta from ' + delta_file)
-    else:
-        print('No delta specified')
-    ip_report = IPReport(workspace=workspace, repo=repo)
-
-    ip_report.do_bin_pkgs()
-    ip_report.do_src_report(copy_packages=do_copy,
-                            delta_file=delta_file,
-                            output_path=output_path,
-                            strip_unchanged=strip_unchanged)
-
-
-if __name__ == "__main__":
-    main(sys.argv[1:])
diff --git a/build-tools/make-installer-images.sh b/build-tools/make-installer-images.sh
deleted file mode 100755
index e1ca114c..00000000
--- a/build-tools/make-installer-images.sh
+++ /dev/null
@@ -1,343 +0,0 @@
-#!/bin/bash -e
-## this script is called by "update-pxe-network-installer" and run in "sudo"
-## created by Yong Hu (yong.hu@intel.com), 05/24/2018
-
-function clean_rootfs {
-    rootfs_dir=$1
-    echo "--> remove old files in original rootfs"
-    conf="$(ls ${rootfs_dir}/etc/ld.so.conf.d/kernel-*.conf)"
-    echo "conf basename = $(basename $conf)"
-    old_version="tbd"
-    if [ -f $conf ]; then
-        old_version="$(echo $(basename $conf) | rev | cut -d'.' -f2- | rev | cut -d'-' -f2-)"
-    fi
-    echo "old version is $old_version"
-    # remove old files in original initrd.img
-    # do this in chroot to avoid accidentialy wrong operations on host root
-chroot $rootfs_dir /bin/bash -x <<EOF
-    rm -rf ./boot/ ./etc/modules-load.d/
-    if [ -n $old_version ] &&  [ -f ./etc/ld.so.conf.d/kernel-${old_version}.conf ]; then
-        rm -rf ./etc/ld.so.conf.d/kernel-${old_version}.conf
-        rm -rf ./lib/modules/${old_version}
-    fi
-    if [ -d ./usr/lib64/python2.7/site-packages/pyanaconda/ ];then
-            rm -rf usr/lib64/python2.7/site-packages/pyanaconda/
-        fi
-        if [ -d ./usr/lib64/python2.7/site-packages/rpm/ ];then
-            rm -rf usr/lib64/python2.7/site-packages/rpm/
-        fi
-        #find old .pyo files and delete them
-        all_pyo="`find ./usr/lib64/python2.7/site-packages/pyanaconda/ usr/lib64/python2.7/site-packages/rpm/ -name *.pyo`"
-        if [ -n $all ]; then
-            for pyo in $all_pyo;do
-                rm -f $pyo
-            done
-        fi
-        exit
-EOF
-    #back to previous folder
-}
-
-
-echo "This script makes new initrd.img, vmlinuz and squashfs.img."
-echo "NOTE: it has to be executed with *root*!"
-
-if [ $# -lt 1 ];then
-    echo "$0 <work_dir>"
-    exit -1;
-fi
-
-work_dir=$1
-output_dir=$work_dir/output
-if [ ! -d $output_dir ]; then
-    mkdir -p $output_dir;
-fi
-
-timestamp=$(date +%F_%H%M)
-
-echo "---------------- start to make new initrd.img and vmlinuz -------------"
-ORIG_INITRD=$work_dir/orig/initrd.img
-if [ ! -f $ORIG_INITRD ];then
-    echo "ERROR: $ORIG_INITRD does NOT exist!"
-    exit -1
-fi
-
-kernel_rpms_dir=$work_dir/kernel-rpms
-if [ ! -d $kernel_rpms_dir ];then
-    echo "ERROR: $kernel_rpms_dir does NOT exist!"
-    exit -1
-fi
-
-firmware_rpms_dir=${work_dir}/firmware-rpms
-if [ ! -d ${firmware_rpms_dir} ];then
-    echo "ERROR: ${firmware_rpms_dir} does NOT exist!"
-    exit -1
-fi
-firmware_list_file=${work_dir}/firmware-list
-
-
-initrd_root=$work_dir/initrd.work
-if [ -d $initrd_root ];then
-    rm -rf $initrd_root
-fi
-mkdir -p $initrd_root
-
-cd $initrd_root
-# uncompress initrd.img
-echo "--> uncompress original initrd.img"
-/usr/bin/xzcat $ORIG_INITRD | cpio -i
-
-echo "--> clean up $initrd_root"
-clean_rootfs $initrd_root
-
-echo "--> extract files from new kernel and its modular rpms to initrd root"
-for kf in ${kernel_rpms_dir}/std/*.rpm ; do rpm2cpio $kf | cpio -idu; done
-
-echo "--> extract files from new firmware rpms to initrd root"
-if [ -f ${firmware_list_file} ]; then
-    echo "--> extract files from new firmware rpm to initrd root"
-    firmware_list=`cat ${firmware_list_file}`
-    for fw in ${firmware_rpms_dir}/std/*.rpm ; do rpm2cpio ${fw} | cpio -iduv ${firmware_list}; done
-fi
-
-# by now new kernel and its modules exist!
-# find new kernel in /boot/vmlinuz-* or /lib/modules/*/vmlinuz
-echo "--> get new kernel image: vmlinuz"
-new_kernel="$(ls ./boot/vmlinuz-* 2>/dev/null || ls ./lib/modules/*/vmlinuz 2>/dev/null || true)"
-echo "New kernel: \"${new_kernel}\""
-if [ -f "${new_kernel}" ];then
-    # copy out the new kernel
-    if [ -f $output_dir/new-vmlinuz ]; then
-        mv -f $output_dir/new-vmlinuz $output_dir/vmlinuz-backup-$timestamp
-    fi
-    cp -f $new_kernel $output_dir/new-vmlinuz
-
-    if echo "${new_kernel}" | grep -q '^\./boot/vmlinuz'; then
-        kernel_name=$(basename $new_kernel)
-        new_ver=$(echo $kernel_name | cut -d'-' -f2-)
-        system_map="boot/System.map-${new_ver}"
-    elif echo "${new_kernel}" | grep -q '^\./lib/modules/'; then
-        new_ver="$(echo "${new_kernel}" | sed 's#^\./lib/modules/\([^/]\+\)/.*$#\1#')"
-        system_map="lib/modules/${new_ver}/System.map"
-    else
-        echo "Unrecognized new kernel path: ${new_kernel}"
-        exit -1
-    fi
-
-    if [ -z "${new_ver}" ]; then
-        echo "Could not determine new kernel version"
-        exit -1
-    fi
-
-    echo "New kernel version: ${new_ver}"
-
-    if ! [ -f "${system_map}" ]; then
-        echo "Could not find System.map file at: ${system_map}"
-        exit -1
-    fi
-else
-    echo "ERROR: new kernel is NOT found!"
-    exit -1
-fi
-
-echo "-->check module dependencies in new initrd.img in chroot context"
-chroot $initrd_root /bin/bash -x <<EOF
-/usr/sbin/depmod -aeF "/${system_map}" "$new_ver"
-if [ $? == 0 ]; then echo "module dependencies are satisfied!" ; fi
-## Remove the biosdevname package!
-rm -f ./usr/lib/udev/rules.d/71-biosdevname.rules ./usr/sbin/biosdevname
-exit
-EOF
-
-echo "-->patch usr/lib/net-lib.sh with IPv6 improvements from newer dracut"
-patch usr/lib/net-lib.sh <<EOF
---- ../initrd.orig/usr/lib/net-lib.sh   2020-08-18 19:37:17.063163840 -0400
-+++ usr/lib/net-lib.sh  2020-08-19 09:47:15.237089800 -0400
-@@ -645,7 +645,8 @@
-     timeout=\$((\$timeout*10))
-
-     while [ \$cnt -lt \$timeout ]; do
--        [ -z "\$(ip -6 addr show dev "\$1" scope link tentative)" ] \\
-+        [ -n "\$(ip -6 addr show dev "\$1" scope link)" ] \\
-+            && [ -z "\$(ip -6 addr show dev "\$1" scope link tentative)" ] \\
-             && return 0
-         [ -n "\$(ip -6 addr show dev "\$1" scope link dadfailed)" ] \\
-             && return 1
-@@ -662,7 +663,9 @@
-     timeout=\$((\$timeout*10))
-
-     while [ \$cnt -lt \$timeout ]; do
--        [ -z "\$(ip -6 addr show dev "\$1" tentative)" ] \\
-+        [ -n "\$(ip -6 addr show dev "\$1")" ] \\
-+            && [ -z "\$(ip -6 addr show dev "\$1" tentative)" ] \\
-+            && [ -n "\$(ip -6 route list proto ra dev "\$1" | grep ^default)" ] \\
-             && return 0
-         [ -n "\$(ip -6 addr show dev "\$1" dadfailed)" ] \\
-             && return 1
-@@ -679,8 +682,9 @@
-     timeout=\$((\$timeout*10))
-
-     while [ \$cnt -lt \$timeout ]; do
--        [ -z "\$(ip -6 addr show dev "\$1" tentative)" ] \\
--            && [ -n "\$(ip -6 route list proto ra dev "\$1")" ] \\
-+        [ -n "\$(ip -6 addr show dev "\$1")" ] \\
-+            && [ -z "\$(ip -6 addr show dev "\$1" tentative)" ] \\
-+            && [ -n "\$(ip -6 route list proto ra dev "\$1" | grep ^default)" ] \\
-             && return 0
-         sleep 0.1
-         cnt=\$((\$cnt+1))
-EOF
-
-echo "-->patch usr/lib/dracut/hooks/pre-trigger/03-lldpad.sh with rd.fcoe disabling support"
-patch usr/lib/dracut/hooks/pre-trigger/03-lldpad.sh <<EOF
---- ../initrd.orig/usr/lib/dracut/hooks/pre-trigger/03-lldpad.sh	2021-05-12 16:32:44.007007124 -0400
-+++ usr/lib/dracut/hooks/pre-trigger/03-lldpad.sh	2021-05-12 16:35:31.321509139 -0400
-@@ -1,5 +1,10 @@
- #!/bin/bash
- 
-+if ! getargbool 0 rd.fcoe -d -n rd.nofcoe; then
-+    info "rd.fcoe=0: skipping lldpad activation"
-+    return 0
-+fi
-+
- # Note lldpad will stay running after switchroot, the system initscripts
- # are to kill it and start a new lldpad to take over. Data is transfered
- # between the 2 using a shm segment
-EOF
-
-echo "-->patch usr/lib/dracut/hooks/cmdline/99-parse-fcoe.sh with rd.fcoe disabling support"
-patch usr/lib/dracut/hooks/cmdline/99-parse-fcoe.sh <<EOF
---- ../initrd.orig/usr/lib/dracut/hooks/cmdline/99-parse-fcoe.sh	2021-05-12 16:32:44.008007121 -0400
-+++ usr/lib/dracut/hooks/cmdline/99-parse-fcoe.sh	2021-05-12 16:36:56.874254504 -0400
-@@ -20,6 +20,10 @@
- # If it's not set we don't continue
- [ -z "$fcoe" ] && return
- 
-+if ! getargbool 0 rd.fcoe -d -n rd.nofcoe; then
-+    info "rd.fcoe=0: skipping fcoe"
-+    return 0
-+fi
- 
- # BRCM: Later, should check whether bnx2x is loaded first before loading bnx2fc so do not load bnx2fc when there are no Broadcom adapters
- [ -e /sys/bus/fcoe/ctlr_create ] || modprobe -b -a fcoe || die "FCoE requested but kernel/initrd does not support FCoE"
-EOF
-
-echo "--> Rebuild the initrd"
-if [ -f $output_dir/new-initrd.img ]; then
-    mv -f $output_dir/new-initrd.img $output_dir/initrd.img-backup-$timestamp
-fi
-find . | cpio -o -H newc | xz --check=crc32 --x86 --lzma2=dict=512KiB > $output_dir/new-initrd.img
-if [ $? != 0 ];then
-    echo "ERROR: failed to create new initrd.img"
-    exit -1
-fi
-
-cd $work_dir
-
-if [ -f $output_dir/new-initrd.img ];then
-    ls -l $output_dir/new-initrd.img
-else
-    echo "ERROR: new-initrd.img is not generated!"
-    exit -1
-fi
-
-if [ -f $output_dir/new-vmlinuz ];then
-    ls -l $output_dir/new-vmlinuz
-else
-    echo "ERROR: new-vmlinuz is not generated!"
-    exit -1
-fi
-
-echo "---------------- start to make new squashfs.img -------------"
-ORIG_SQUASHFS=$work_dir/orig/squashfs.img
-if [ ! -f $ORIG_SQUASHFS ];then
-    echo "ERROR: $ORIG_SQUASHFS does NOT exist!"
-    exit -1
-fi
-
-rootfs_rpms_dir=$work_dir/rootfs-rpms
-if [ ! -d $rootfs_rpms_dir ];then
-    echo "ERROR: $rootfs_rpms_dir does NOT exist!"
-    exit -1
-fi
-
-# make squashfs.mnt and ready and umounted
-if [ ! -d $work_dir/squashfs.mnt ];then
-    mkdir -p $work_dir/squashfs.mnt
-else
-    # in case it was mounted previously
-    mnt_path=$(mount | grep "squashfs.mnt" | cut -d' ' -f3-3)
-    if [ x"$mnt_path" != "x" ] &&  [ "$(basename $mnt_path)" == "squashfs.mnt" ];then
-        umount $work_dir/squashfs.mnt
-    fi
-fi
-
-# make squashfs.work ready and umounted
-squashfs_root="$work_dir/squashfs.work"
-# Now mount the rootfs.img file:
-if [ ! -d $squashfs_root ];then
-    mkdir -p $squashfs_root
-else
-    # in case it was mounted previously
-    mnt_path=$(mount | grep "$(basename $squashfs_root)" | cut -d' ' -f3-3)
-    if [ x"$mnt_path" != "x" ] &&  [ "$(basename $mnt_path)" == "$(basename $squashfs_root)" ];then
-        umount $squashfs_root
-    fi
-fi
-
-echo $ORIG_SQUASHFS
-mount -o loop -t squashfs $ORIG_SQUASHFS $work_dir/squashfs.mnt
-
-if [ ! -d ./LiveOS ]; then
-    mkdir -p ./LiveOS
-fi
-
-echo "--> copy rootfs.img from original squashfs.img to LiveOS folder"
-cp -f ./squashfs.mnt/LiveOS/rootfs.img ./LiveOS/.
-
-echo "--> done to copy rootfs.img, umount squashfs.mnt"
-umount ./squashfs.mnt
-
-echo "--> mount rootfs.img into $squashfs_root"
-mount -o loop LiveOS/rootfs.img $squashfs_root
-
-echo "--> clean up ./squashfs-rootfs from original squashfs.img in chroot context"
-clean_rootfs $squashfs_root
-
-cd $squashfs_root
-echo "--> extract files from rootfs-rpms to squashfs root"
-for ff in $rootfs_rpms_dir/*.rpm ; do rpm2cpio $ff | cpio -idu; done
-
-echo "--> extract files from kernel and its modular rpms to squashfs root"
-for kf in ${kernel_rpms_dir}/std/*.rpm ; do rpm2cpio $kf | cpio -idu; done
-
-echo "-->check module dependencies in new squashfs.img in chroot context"
-#we are using the same new  kernel-xxx.rpm, so the $new_ver is the same
-chroot $squashfs_root /bin/bash -x <<EOF
-/usr/sbin/depmod -aeF "/${system_map}" "$new_ver"
-if [ $? == 0 ]; then echo "module dependencies are satisfied!" ; fi
-## Remove the biosdevname package!
-rm -f ./usr/lib/udev/rules.d/71-biosdevname.rules ./usr/sbin/biosdevname
-exit
-EOF
-
-# come back to the original work dir
-cd $work_dir
-
-echo "--> unmount $squashfs_root"
-umount $squashfs_root
-#rename the old version
-if [ -f $output_dir/new-squashfs.img ]; then
-    mv -f $output_dir/new-squashfs.img $output_dir/squashfs.img-backup-$timestamp
-fi
-
-echo "--> make the new squashfs image"
-mksquashfs LiveOS $output_dir/new-squashfs.img -keep-as-directory -comp xz -b 1M
-if [ $? == 0 ];then
-    ls -l $output_dir/new-squashfs.img
-else
-    echo "ERROR: failed to make a new squashfs.img"
-    exit -1
-fi
-
-echo "--> done successfully!"
diff --git a/build-tools/mk/_sign_pkgs.mk b/build-tools/mk/_sign_pkgs.mk
deleted file mode 100644
index aa92b0e8..00000000
--- a/build-tools/mk/_sign_pkgs.mk
+++ /dev/null
@@ -1,31 +0,0 @@
-
-#
-# this makefile is used by the build-iso process to add file signature to all rpms
-# 
-# it requires a private key, passed as the variable KEY
-
-PKGS_LIST := $(wildcard *.rpm)
-
-# we need to skip the signature of some packages that
-# might be installed in file systems that do not support extended attributes
-# in the case of shim- and grub2-efi-, the UEFI configuration installs them in a VFAT file system
-PKGS_TO_SKIP := $(wildcard grub2-efi-[0-9]*.x86_64.rpm grub2-efi-x64-[0-9]*.x86_64.rpm shim-[0-9]*.x86_64.rpm shim-x64-[0-9]*.x86_64.rpm shim-ia32-[0-9]*.x86_64)
-
-PKGS_TO_SIGN = $(filter-out $(PKGS_TO_SKIP),$(PKGS_LIST))
-
-define _pkg_sign_tmpl
-
-_sign_$1 :
-	@ rpmsign --signfiles --fskpath=$(KEY) $1
-	@ chown mockbuild $1
-	@ chgrp users $1
-
-sign : _sign_$1
-
-endef
-
-sign :
-	@echo signed all packages
-
-$(foreach file,$(PKGS_TO_SIGN),$(eval $(call _pkg_sign_tmpl,$(file))))
-
diff --git a/build-tools/mockchain-parallel b/build-tools/mockchain-parallel
deleted file mode 100755
index 73029df6..00000000
--- a/build-tools/mockchain-parallel
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/bin/bash
-#
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-# The following tries to choose the best mockchain-parallel-* implementation
-# to use, based on the version of /usr/bin/mockchain
-#
-# We want to use a compatable API, and to use the same python version.
-#
-
-interpreter_path () {
-    local path=${1}
-    if [ ! -f ${path} ]; then
-        return 1
-    fi
-    readlink -f $(head -n 1 ${path} | sed 's/^#!//' | awk '{ print $1 }' )
-}
-
-get__version__ () {
-    local path=${1}
-    local var=""
-    if [ ! -f ${path} ]; then
-        return 1
-    fi
-    if file ${path} | grep -q 'Python script'; then
-        ver=$(grep __VERSION__= ${path} | cut -d '=' -f 2 | sed 's/"//g')
-    else
-        ver=$(${path} --version 2> /dev/null)
-    fi
-    echo $ver
-}
-
-VC_LESS_THAN=0
-VC_EQUAL=1
-VC_GREATER_THAN=2
-ver_comp () {
-    local v1=${1}
-    local v2=${2}
-    local v_greater=""
-
-    if [ "${v1}" == "${v2}" ]; then
-        echo $VC_EQUAL
-        return
-    fi
-
-    v_greater=$((echo ${v1}; echo ${v2}) | sort -rV | head -n 1)
-    if [ "${v1}" == "${v_greater}" ]; then
-        echo $VC_GREATER_THAN
-        return
-    fi
-
-    echo $VC_LESS_THAN
-}
-
-MOCKCHAIN_PATH="/usr/bin/mockchain"
-MOCKCHAIN_PARALLEL_PATH_ROOT="${MY_REPO}/build-tools/mockchain-parallel"
-DEFAULT_MOCKCHAIN_PARALLEL_PATH="${MOCKCHAIN_PARALLEL_PATH_ROOT}-1.3.4"
-
-MOCKCHAIN_INTERPRETER_PATH=$(interpreter_path ${MOCKCHAIN_PATH})
-MOCKCHAIN_VER=$(get__version__ ${MOCKCHAIN_PATH})
-if [ -z "${MOCKCHAIN_VER}" ]; then
-    MOCKCHAIN_VER=$(rpm -q --queryformat '%{VERSION}' mock)
-    if [ -z "${MOCKCHAIN_VER}" ]; then
-        echo "Error: Failed to determine version of '${MOCKCHAIN_PATH}'"
-        exit 1
-    fi
-fi
-
-BEST_VER=""
-BEST_MOCKCHAIN_PARALLEL_PATH=""
-
-for MOCKCHAIN_PARALLEL_PATH in $(ls -1 ${MOCKCHAIN_PARALLEL_PATH_ROOT}-*); do
-    MOCKCHAIN_PARALLEL_VER=$(get__version__ ${MOCKCHAIN_PARALLEL_PATH})
-    if [ -z "${MOCKCHAIN_PARALLEL_VER}" ]; then
-        echo "Warning: Failed to determine version of '${MOCKCHAIN_PARALLEL_PATH}'"
-        continue
-    fi
-    COMP=$(ver_comp "${MOCKCHAIN_VER}" "${MOCKCHAIN_PARALLEL_VER}")
-    echo $MOCKCHAIN_PARALLEL_PATH $MOCKCHAIN_PARALLEL_VER $COMP
-    if [ $COMP -eq $VC_EQUAL ]; then
-        BEST_VER=${MOCKCHAIN_PARALLEL_VER}
-        BEST_MOCKCHAIN_PARALLEL_PATH=${MOCKCHAIN_PARALLEL_PATH}
-        break
-    fi
-    if [ $COMP -gt $VC_EQUAL ]; then
-        if [ "${BEST_VER}" == "" ]; then
-            BEST_VER=${MOCKCHAIN_PARALLEL_VER}
-            BEST_MOCKCHAIN_PARALLEL_PATH=${MOCKCHAIN_PARALLEL_PATH}
-            continue
-        fi
-
-        COMP=$(ver_comp ${MOCKCHAIN_PARALLEL_VER} ${BEST_VER})
-        if [ $COMP -gt $VC_EQUAL ]; then
-            BEST_VER=${MOCKCHAIN_PARALLEL_VER}
-            BEST_MOCKCHAIN_PARALLEL_PATH=${MOCKCHAIN_PARALLEL_PATH}
-        fi
-    fi
-done
-
-MOCKCHAIN_PARALLEL_INTERPRETER_PATH=${BEST_MOCKCHAIN_PARALLEL_INTERPRETER_PATH}
-MOCKCHAIN_PARALLEL_PATH=${BEST_MOCKCHAIN_PARALLEL_PATH}
-
-if [ -z "${MOCKCHAIN_PARALLEL_PATH}" ]; then
-    MOCKCHAIN_PARALLEL_PATH="${DEFAULT_MOCKCHAIN_PARALLEL_PATH}"
-fi
-
-echo "PYTHONDONTWRITEBYTECODE=true exec ${MOCKCHAIN_PARALLEL_INTERPRETER_PATH} ${MOCKCHAIN_PARALLEL_PATH} $@"
-PYTHONDONTWRITEBYTECODE=true exec ${MOCKCHAIN_PARALLEL_INTERPRETER_PATH} ${MOCKCHAIN_PARALLEL_PATH} "$@"
diff --git a/build-tools/mockchain-parallel-1.3.4 b/build-tools/mockchain-parallel-1.3.4
deleted file mode 100755
index 826acf75..00000000
--- a/build-tools/mockchain-parallel-1.3.4
+++ /dev/null
@@ -1,1219 +0,0 @@
-#!/usr/bin/python2.7 -tt
-# -*- coding: utf-8 -*-
-# vim: noai:ts=4:sw=4:expandtab
-
-# by skvidal@fedoraproject.org
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA.
-# copyright 2012 Red Hat, Inc.
-
-# SUMMARY
-# mockchain
-# take a mock config and a series of srpms
-# rebuild them one at a time
-# adding each to a local repo
-# so they are available as build deps to next pkg being built
-from __future__ import print_function
-
-import cgi
-# pylint: disable=deprecated-module
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import multiprocessing
-import signal
-import psutil
-
-import requests
-# pylint: disable=import-error
-from six.moves.urllib_parse import urlsplit
-
-import mockbuild.util
-
-from stxRpmUtils import splitRpmFilename
-
-# all of the variables below are substituted by the build system
-__VERSION__="1.3.4"
-SYSCONFDIR="/etc"
-PYTHONDIR="/usr/lib/python2.7/site-packages"
-PKGPYTHONDIR="/usr/lib/python2.7/site-packages/mockbuild"
-MOCKCONFDIR = os.path.join(SYSCONFDIR, "mock")
-# end build system subs
-
-mockconfig_path = '/etc/mock'
-
-def rpmName(path):
-    filename = os.path.basename(path)
-    (n, v, r, e, a) = splitRpmFilename(filename)
-    return n
-
-def createrepo(path):
-    global max_workers
-    if os.path.exists(path + '/repodata/repomd.xml'):
-        comm = ['/usr/bin/createrepo_c', '--update', '--retain-old-md', "%d" % max_workers, "--workers", "%d" % max_workers, path]
-    else:
-        comm = ['/usr/bin/createrepo_c', "--workers", "%d" % max_workers, path]
-    cmd = subprocess.Popen(
-        comm, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    return out, err
-
-
-g_opts = optparse.Values()
-
-def parse_args(args):
-    parser = optparse.OptionParser('\nmockchain -r mockcfg pkg1 [pkg2] [pkg3]')
-    parser.add_option(
-        '-r', '--root', default=None, dest='chroot',
-        metavar="CONFIG",
-        help="chroot config name/base to use in the mock build")
-    parser.add_option(
-        '-l', '--localrepo', default=None,
-        help="local path for the local repo, defaults to making its own")
-    parser.add_option(
-        '-c', '--continue', default=False, action='store_true',
-        dest='cont',
-        help="if a pkg fails to build, continue to the next one")
-    parser.add_option(
-        '-a', '--addrepo', default=[], action='append',
-        dest='repos',
-        help="add these repo baseurls to the chroot's yum config")
-    parser.add_option(
-        '--recurse', default=False, action='store_true',
-        help="if more than one pkg and it fails to build, try to build the rest and come back to it")
-    parser.add_option(
-        '--log', default=None, dest='logfile',
-        help="log to the file named by this option, defaults to not logging")
-    parser.add_option(
-        '--workers', default=1, dest='max_workers',
-        help="number of parallel build jobs")
-    parser.add_option(
-        '--worker-resources', default="", dest='worker_resources',
-        help="colon seperated list, how much mem in gb for each workers temfs")
-    parser.add_option(
-        '--basedir', default='/var/lib/mock', dest='basedir',
-        help="path to workspace")
-    parser.add_option(
-        '--tmp_prefix', default=None, dest='tmp_prefix',
-        help="tmp dir prefix - will default to username-pid if not specified")
-    parser.add_option(
-        '-m', '--mock-option', default=[], action='append',
-        dest='mock_option',
-        help="option to pass directly to mock")
-    parser.add_option(
-        '--mark-slow-name', default=[], action='append',
-        dest='slow_pkg_names_raw',
-        help="package name that is known to build slowly")
-    parser.add_option(
-        '--mark-slow-path', default=[], action='append',
-        dest='slow_pkgs_raw',
-        help="package path that is known to build slowly")
-    parser.add_option(
-        '--mark-big-name', default=[], action='append',
-        dest='big_pkg_names_raw',
-        help="package name that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--mark-big-path', default=[], action='append',
-        dest='big_pkgs_raw',
-        help="package path that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--srpm-dependency-file', default=None, 
-        dest='srpm_dependency_file',
-        help="path to srpm dependency file")
-    parser.add_option(
-        '--rpm-dependency-file', default=None, 
-        dest='rpm_dependency_file',
-        help="path to rpm dependency file")
-    parser.add_option(
-        '--rpm-to-srpm-map-file', default=None, 
-        dest='rpm_to_srpm_map_file',
-        help="path to rpm to srpm map file")
-
-    opts, args = parser.parse_args(args)
-    if opts.recurse:
-        opts.cont = True
-
-    if not opts.chroot:
-        print("You must provide an argument to -r for the mock chroot")
-        sys.exit(1)
-
-    if len(sys.argv) < 3:
-        print("You must specify at least 1 package to build")
-        sys.exit(1)
-
-    return opts, args
-
-
-REPOS_ID = []
-
-slow_pkg_names={}
-slow_pkgs={}
-big_pkg_names={}
-big_pkgs={}
-
-def generate_repo_id(baseurl):
-    """ generate repository id for yum.conf out of baseurl """
-    repoid = "/".join(baseurl.split('//')[1:]).replace('/', '_')
-    repoid = re.sub(r'[^a-zA-Z0-9_]', '', repoid)
-    suffix = ''
-    i = 1
-    while repoid + suffix in REPOS_ID:
-        suffix = str(i)
-        i += 1
-    repoid = repoid + suffix
-    REPOS_ID.append(repoid)
-    return repoid
-
-
-def set_build_idx(infile, destfile, build_idx, tmpfs_size_gb, opts):
-    # log(opts.logfile, "set_build_idx: infile=%s, destfile=%s, build_idx=%d, tmpfs_size_gb=%d" % (infile, destfile, build_idx, tmpfs_size_gb))
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['root'] = config_opts['root'].replace('b0', 'b{0}'.format(build_idx))
-        config_opts['cache_topdir'] = config_opts['cache_topdir'].replace('b0', 'b{0}'.format(build_idx))
-        # log(opts.logfile, "set_build_idx: root=%s" % config_opts['root'])
-        # log(opts.logfile, "set_build_idx: cache_topdir=%s" % config_opts['cache_topdir'])
-        if tmpfs_size_gb > 0:
-            config_opts['plugin_conf']['tmpfs_enable'] = True
-            config_opts['plugin_conf']['tmpfs_opts'] = {}
-            config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 1024
-            config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = "%dg" % tmpfs_size_gb
-            config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
-            config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = True
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_enable=%s" % config_opts['plugin_conf']['tmpfs_enable'])
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_opts->max_fs_size=%s" % config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'])
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % config_opts['cache_topdir'])
-            if not os.path.isdir(config_opts['cache_topdir']):
-                os.makedirs(config_opts['cache_topdir'], exist_ok=True)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % config_opts['cache_topdir']
-
-        cache_dir = "%s/%s/mock" % (config_opts['basedir'], config_opts['root'])
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % cache_dir)
-            if not os.path.isdir(cache_dir):
-                os.makedirs(cache_dir)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % cache_dir
-
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def set_basedir(infile, destfile, basedir, opts):
-    log(opts.logfile, "set_basedir: infile=%s, destfile=%s, basedir=%s" % (infile, destfile, basedir))
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['basedir'] = basedir
-        config_opts['resultdir'] = '{0}/result'.format(basedir)
-        config_opts['backup_base_dir'] = '{0}/backup'.format(basedir)
-        config_opts['root'] = 'mock/b0'
-        config_opts['cache_topdir'] = '{0}/cache/b0'.format(basedir)
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def add_local_repo(infile, destfile, baseurl, repoid=None):
-    """take a mock chroot config and add a repo to it's yum.conf
-       infile = mock chroot config file
-       destfile = where to save out the result
-       baseurl = baseurl of repo you wish to add"""
-    global config_opts
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-        if not repoid:
-            repoid = generate_repo_id(baseurl)
-        else:
-            REPOS_ID.append(repoid)
-        localyumrepo = """
-[%s]
-name=%s
-baseurl=%s
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-cost=1
-best=1
-""" % (repoid, baseurl, baseurl)
-
-        config_opts['yum.conf'] += localyumrepo
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-
-def do_build(opts, cfg, pkg):
-
-    # returns 0, cmd, out, err = failure
-    # returns 1, cmd, out, err  = success
-    # returns 2, None, None, None = already built
-
-    signal.signal(signal.SIGTERM, child_signal_handler)
-    signal.signal(signal.SIGINT, child_signal_handler)
-    signal.signal(signal.SIGHUP, child_signal_handler)
-    signal.signal(signal.SIGABRT, child_signal_handler)
-    s_pkg = os.path.basename(pkg)
-    pdn = s_pkg.replace('.src.rpm', '')
-    resdir = '%s/%s' % (opts.local_repo_dir, pdn)
-    resdir = os.path.normpath(resdir)
-    if not os.path.exists(resdir):
-        os.makedirs(resdir)
-
-    success_file = resdir + '/success'
-    fail_file = resdir + '/fail'
-
-    if os.path.exists(success_file):
-        # return 2, None, None, None
-        sys.exit(2)
-
-    # clean it up if we're starting over :)
-    if os.path.exists(fail_file):
-        os.unlink(fail_file)
-
-    if opts.uniqueext == '':
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '-r', cfg, ]
-    else:
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '--uniqueext', opts.uniqueext,
-                   '-r', cfg, ]
-
-    # Ensure repo is up-to-date.
-    # Note: Merely adding --update to mockcmd failed to update
-    mockcmd_update=mockcmd
-    mockcmd_update.append('--update')
-    cmd = subprocess.Popen(
-        mockcmd_update, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode != 0:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-
-    # heuristic here, if user pass for mock "-d foo", but we must be care to leave
-    # "-d'foo bar'" or "--define='foo bar'" as is
-    compiled_re_1 = re.compile(r'^(-\S)\s+(.+)')
-    compiled_re_2 = re.compile(r'^(--[^ =])[ =](\.+)')
-    for option in opts.mock_option:
-        r_match = compiled_re_1.match(option)
-        if r_match:
-            mockcmd.extend([r_match.group(1), r_match.group(2)])
-        else:
-            r_match = compiled_re_2.match(option)
-            if r_match:
-                mockcmd.extend([r_match.group(1), r_match.group(2)])
-            else:
-                mockcmd.append(option)
-
-    print('building %s' % s_pkg)
-    mockcmd.append(pkg)
-    # print("mockcmd: %s" % str(mockcmd))
-    cmd = subprocess.Popen(
-        mockcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode == 0:
-        with open(success_file, 'w') as f:
-            f.write('done\n')
-        ret = 1
-    else:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-        with open(fail_file, 'w') as f:
-            f.write('undone\n')
-        ret = 0
-
-    # return ret, cmd, out, err
-    sys.exit(ret)
-
-
-def log(lf, msg):
-    if lf:
-        now = time.time()
-        try:
-            with open(lf, 'a') as f:
-                f.write(str(now) + ':' + msg + '\n')
-        except (IOError, OSError) as e:
-            print('Could not write to logfile %s - %s' % (lf, str(e)))
-    print(msg)
-
-
-config_opts = {}
-
-worker_data = []
-workers = 0
-max_workers = 1
-
-build_env = []
-
-failed = []
-built_pkgs = []
-
-local_repo_dir = ""
-
-pkg_to_name={}
-name_to_pkg={}
-srpm_dependencies_direct={}
-rpm_dependencies_direct={}
-rpm_to_srpm_map={}
-no_dep_list = [ "bash", "kernel" , "kernel-rt" ]
-
-
-def init_build_env(slots, opts, config_opts_in):
-    global build_env
-
-    orig_chroot_name=config_opts_in['chroot_name']
-    orig_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(orig_chroot_name))
-    # build_env.append({'state': 'Idle', 'cfg': orig_mock_config})
-    for i in range(0,slots):
-        new_chroot_name = "{0}.b{1}".format(orig_chroot_name, i)
-        new_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(new_chroot_name))
-        tmpfs_size_gb = 0
-        if opts.worker_resources == "":
-            if i > 0:
-                tmpfs_size_gb = 2 * (1 + slots - i)
-        else:
-            resource_array=opts.worker_resources.split(':')
-            if i < len(resource_array):
-                tmpfs_size_gb=int(resource_array[i])
-            else:
-                log(opts.logfile, "Error: worker-resources argument '%s' does not supply info for all %d workers" % (opts.worker_resources, slots))
-                sys.exit(1)
-        if i == 0 and tmpfs_size_gb != 0:
-            log(opts.logfile, "Error: worker-resources argument '%s' must pass '0' as first value" % (opts.worker_resources, slots))
-            sys.exit(1)
-        build_env.append({'state': 'Idle', 'cfg': new_mock_config, 'fs_size_gb': tmpfs_size_gb})
-
-        res, msg = set_build_idx(orig_mock_config, new_mock_config, i, tmpfs_size_gb, opts)
-        if not res:
-            log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-            sys.exit(1)
-
-
-idle_build_env_last_awarded = 0
-def get_idle_build_env(slots):
-    global build_env
-    global idle_build_env_last_awarded
-    visited = 0
-
-    if slots < 1:
-        return -1
-
-    i = idle_build_env_last_awarded - 1
-    if i < 0 or i >= slots:
-        i = slots - 1
-
-    while visited < slots:
-        if build_env[i]['state'] == 'Idle':
-            build_env[i]['state'] = 'Busy'
-            idle_build_env_last_awarded = i
-            return i
-        visited = visited + 1
-        i = i - 1
-        if i < 0:
-            i = slots - 1
-    return -1
-
-def release_build_env(idx):
-    global build_env
-
-    build_env[idx]['state'] = 'Idle'
-
-def get_best_rc(a, b):
-    print("get_best_rc: a=%s" % str(a))
-    print("get_best_rc: b=%s" % str(b))
-    if (b == {}) and (a != {}):
-        return a
-    if (a == {}) and (b != {}):
-        return b
-
-    if (b['build_name'] is None) and (not a['build_name'] is None):
-        return a
-    if (a['build_name'] is None) and (not b['build_name'] is None):
-        return b
-            
-    if a['unbuilt_deps'] < b['unbuilt_deps']:
-        return a
-    if b['unbuilt_deps'] < a['unbuilt_deps']:
-        return b
-
-    if a['depth'] < b['depth']:
-        return a
-    if b['depth'] < a['depth']:
-        return b
-
-    print("get_best_rc: uncertain %s vs %s" % (a,b))
-    return a
-
-unbuilt_dep_list_print=False
-def unbuilt_dep_list(name, unbuilt_pkg_names, depth, checked=None):
-    global srpm_dependencies_direct
-    global rpm_dependencies_direct
-    global rpm_to_srpm_map
-    global no_dep_list
-    global unbuilt_dep_list_print
-
-    first_iteration=False
-    unbuilt = []
-    if name in no_dep_list:
-        return unbuilt
-
-    if checked is None:
-        first_iteration=True
-        checked=[]
-
-    # Count unbuild dependencies
-    if first_iteration:
-        dependencies_direct=srpm_dependencies_direct
-    else:
-        dependencies_direct=rpm_dependencies_direct
-
-    if name in dependencies_direct:
-        for rdep in dependencies_direct[name]:
-            sdep='???'
-            if rdep in rpm_to_srpm_map:
-                sdep = rpm_to_srpm_map[rdep]
-            if rdep != name and sdep != name and not rdep in checked:
-                if (not first_iteration) and (sdep in no_dep_list):
-                    continue
-                checked.append(rdep)
-                if sdep in unbuilt_pkg_names:
-                    if not sdep in unbuilt:
-                        unbuilt.append(sdep)
-                if depth > 0:
-                    child_unbuilt = unbuilt_dep_list(rdep, unbuilt_pkg_names, depth-1, checked)
-                    for sub_sdep in child_unbuilt:
-                        if sub_sdep != name:
-                            if not sub_sdep in unbuilt:
-                                unbuilt.append(sub_sdep)
-
-    return unbuilt
-
-def can_build_at_idx(build_idx, name, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-    global build_env
-
-    fs_size_gb = 0
-    size_gb = 0
-    speed = 0
-    pkg = name_to_pkg[name]
-    if name in big_pkg_names:
-        size_gb=big_pkg_names[name]
-    if pkg in big_pkgs:
-        size_gb=big_pkgs[pkg]
-    if name in slow_pkg_names:
-        speed=slow_pkg_names[name]
-    if pkg in slow_pkgs:
-        speed=slow_pkgs[pkg]
-    fs_size_gb = build_env[build_idx]['fs_size_gb']
-    return fs_size_gb == 0 or fs_size_gb >= size_gb
-
-def schedule(build_idx, pkgs, opts):
-    global worker_data
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-
-    unbuilt_pkg_names=[]
-    building_pkg_names=[]
-    unprioritized_pkg_names=[]
-
-    for pkg in pkgs:
-        name = pkg_to_name[pkg]
-        unbuilt_pkg_names.append(name)
-        unprioritized_pkg_names.append(name)
-
-    prioritized_pkg_names=[]
-
-    for wd in worker_data:
-        pkg = wd['pkg']
-        if not pkg is None:
-            name = pkg_to_name[pkg]
-            building_pkg_names.append(name)
-
-    # log(opts.logfile, "schedule: build_idx=%d  start" % build_idx)
-    if len(big_pkg_names) or len(big_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in big_pkg_names or pkg in big_pkgs:
-                prioritized_pkg_names.append(name)
-                next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    if len(slow_pkg_names) or len(slow_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in slow_pkg_names or pkg in slow_pkgs:
-                if can_build_at_idx(build_idx, name, opts):
-                    prioritized_pkg_names.append(name)
-                    next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    for name in unprioritized_pkg_names:
-        if can_build_at_idx(build_idx, name, opts):
-            prioritized_pkg_names.append(name)
-
-    name_out = schedule2(build_idx, prioritized_pkg_names, unbuilt_pkg_names, building_pkg_names, opts)
-    if not name_out is None:
-        pkg_out = name_to_pkg[name_out]
-    else:
-        pkg_out = None
-        # log(opts.logfile, "schedule: failed to translate '%s' to a pkg" % name_out)
-    # log(opts.logfile, "schedule: build_idx=%d  end: out = %s -> %s" % (build_idx, str(name_out), str(pkg_out)))
-    return pkg_out
-
-    
-def schedule2(build_idx, pkg_names, unbuilt_pkg_names, building_pkg_names, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global no_dep_list
-
-    max_depth = 3
-
-    if len(pkg_names) == 0:
-        return None
-
-    unbuilt_deps={}
-    building_deps={}
-    for depth in range(max_depth,-1,-1):
-        unbuilt_deps[depth]={}
-        building_deps[depth]={}
-
-    for depth in range(max_depth,-1,-1):
-        checked=[]
-        reordered_pkg_names = pkg_names[:]
-        # for name in reordered_pkg_names:
-        while len(reordered_pkg_names):
-            name = reordered_pkg_names.pop(0)
-            if name in checked:
-                continue
-
-            # log(opts.logfile, "checked.append(%s)" % name)
-            checked.append(name)
-
-            pkg = name_to_pkg[name]
-            # log(opts.logfile, "schedule2: check '%s', depth %d" % (name, depth))
-            if not name in unbuilt_deps[depth]:
-                unbuilt_deps[depth][name] = unbuilt_dep_list(name, unbuilt_pkg_names, depth)
-            if not name in building_deps[depth]:
-                building_deps[depth][name] = unbuilt_dep_list(name, building_pkg_names, depth)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, depth, unbuilt_deps[depth][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, depth, building_deps[depth][name]))
-            if len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 0:
-                if can_build_at_idx(build_idx, name, opts):
-                    log(opts.logfile, "schedule2: no unbuilt deps for '%s', searching at depth %d" % (name, depth))
-                    return name
-                else:
-                    # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                    continue
-
-            if not name in unbuilt_deps[0]:
-                unbuilt_deps[0][name] = unbuilt_dep_list(name, unbuilt_pkg_names, 0)
-            if not name in building_deps[0]:
-                building_deps[0][name] = unbuilt_dep_list(name, building_pkg_names, 0)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, 0, unbuilt_deps[0][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, 0, building_deps[0][name]))
-            if (len(building_deps[depth][name]) == 0 and len(unbuilt_deps[depth][name]) == 1 and unbuilt_deps[depth][name][0] in no_dep_list) or (len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 1 and building_deps[depth][name][0] in no_dep_list):
-                if len(unbuilt_deps[0][name]) == 0 and len(building_deps[0][name]) == 0:
-                    if can_build_at_idx(build_idx, name, opts):
-                        log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, searching at depth %d" % (name, depth))
-                        return name
-                    else:
-                        # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                        continue
-
-            loop = False
-            for dep_name in unbuilt_deps[depth][name]:
-                if name == dep_name:
-                    continue
-
-                # log(opts.logfile, "name=%s depends on dep_name=%s, depth=%d" % (name, dep_name, depth))
-                if dep_name in checked:
-                    continue
-
-                # log(opts.logfile, "schedule2: check '%s' indirect" % dep_name)
-                if not dep_name in unbuilt_deps[depth]:
-                    unbuilt_deps[depth][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, depth)
-                if not dep_name in building_deps[depth]:
-                    building_deps[depth][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, depth)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, unbuilt_deps[depth][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, building_deps[depth][dep_name]))
-                if len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 0:
-                    if can_build_at_idx(build_idx, dep_name, opts):
-                        log(opts.logfile, "schedule2: deps: no unbuilt deps for '%s', working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                        return dep_name
-
-                if not dep_name in unbuilt_deps[0]:
-                    unbuilt_deps[0][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, 0)
-                if not dep_name in building_deps[0]:
-                    building_deps[0][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, 0)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, unbuilt_deps[0][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, building_deps[0][dep_name]))
-                if (len(building_deps[depth][dep_name]) == 0 and len(unbuilt_deps[depth][dep_name]) == 1 and unbuilt_deps[depth][dep_name][0] in no_dep_list) or (len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 1 and building_deps[depth][dep_name][0] in no_dep_list):
-                    if len(unbuilt_deps[0][dep_name]) == 0 and len(building_deps[0][dep_name]) == 0:
-                        if can_build_at_idx(build_idx, dep_name, opts):
-                            log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                            return dep_name
-
-                if name in unbuilt_deps[0][dep_name]:
-                    loop = True
-                    # log(opts.logfile, "schedule2: loop detected: %s <-> %s" % (name, dep_name))
-
-            if loop and len(building_deps[depth][name]) == 0:
-                log(opts.logfile, "schedule2: loop detected, try to build '%s'" % name)
-                return name
-
-            for dep_name in unbuilt_deps[depth][name]:
-                if dep_name in reordered_pkg_names:
-                    # log(opts.logfile, "schedule2: promote %s to work toward %s" % (dep_name, name))
-                    reordered_pkg_names.remove(dep_name)
-                    reordered_pkg_names.insert(0,dep_name)
-
-    # log(opts.logfile, "schedule2: Nothing buildable at this time")
-    return None
-
-
-    
-
-def read_deps(opts):
-    read_srpm_deps(opts)
-    read_rpm_deps(opts)
-    read_map_deps(opts)
-
-def read_srpm_deps(opts):
-    global srpm_dependencies_direct
-
-    if opts.srpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.srpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.srpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.srpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            srpm_dependencies_direct[name]=deps.split(',')
-
-def read_rpm_deps(opts):
-    global rpm_dependencies_direct
-
-    if opts.rpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.rpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            rpm_dependencies_direct[name]=deps.split(',')
-
-def read_map_deps(opts):
-    global rpm_to_srpm_map
-
-    if opts.rpm_to_srpm_map_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_to_srpm_map_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_to_srpm_map_file)
-        sys.exit(1) 
-
-    with open(opts.rpm_to_srpm_map_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (rpm,srpm) = line.rstrip().split(';')
-            rpm_to_srpm_map[rpm]=srpm
-
-
-def reaper(opts):
-    global built_pkgs
-    global failed
-    global worker_data
-    global workers
-
-    reaped = 0
-    need_createrepo = False
-    last_reaped = -1
-    while reaped > last_reaped:
-        last_reaped = reaped
-        for wd in worker_data:
-            p = wd['proc']
-            ret = p.exitcode
-            if ret is not None:
-                pkg = wd['pkg']
-                b = int(wd['build_index'])
-                p.join()
-                worker_data.remove(wd)
-                workers = workers - 1
-                reaped = reaped + 1
-                release_build_env(b)
-
-                log(opts.logfile, "End build on 'b%d': %s" % (b, pkg))
-
-                if ret == 0:
-                    failed.append(pkg)
-                    log(opts.logfile, "Error building %s on 'b%d'." % (os.path.basename(pkg), b))
-                    if opts.recurse and not stop_signal:
-                        log(opts.logfile, "Will try to build again (if some other package will succeed).")
-                    else:
-                        log(opts.logfile, "See logs/results in %s" % opts.local_repo_dir)
-                elif ret == 1:
-                    log(opts.logfile, "Success building %s on 'b%d'" % (os.path.basename(pkg), b))
-                    built_pkgs.append(pkg)
-                    need_createrepo = True
-                elif ret == 2:
-                    log(opts.logfile, "Skipping already built pkg %s" % os.path.basename(pkg))
-
-    if need_createrepo:
-        # createrepo with the new pkgs
-        err = createrepo(opts.local_repo_dir)[1]
-        if err.strip():
-            log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-            log(opts.logfile, "Err: %s" % err)
-
-    return reaped
-
-stop_signal = False
-
-def on_terminate(proc):
-    print("process {} terminated with exit code {}".format(proc, proc.returncode))
-
-def kill_proc_and_descentents(parent, need_stop=False, verbose=False):
-    global g_opts
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Stop %d" % parent.pid)
-
-        try:
-            parent.send_signal(signal.SIGSTOP)
-        except:
-            # perhaps mock still running as root, give it a sec to drop pivledges and try again
-            time.sleep(1)
-            parent.send_signal(signal.SIGSTOP)
-
-    children = parent.children(recursive=False)
-
-    for p in children:
-        kill_proc_and_descentents(p, need_stop=True, verbose=verbose)
-
-    if verbose:
-        log(g_opts.logfile, "Terminate %d" % parent.pid)
-
-    # parent.send_signal(signal.SIGTERM)
-    try:
-        parent.terminate()
-    except:
-        # perhaps mock still running as root, give it a sec to drop pivledges and try again
-        time.sleep(1)
-        parent.terminate()
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Continue %d" % parent.pid)
-
-        parent.send_signal(signal.SIGCONT)
-
-
-def child_signal_handler(signum, frame):
-    global g_opts
-    my_pid = os.getpid()
-    # log(g_opts.logfile, "--------- child %d recieved signal %d" % (my_pid, signum))
-    p = psutil.Process(my_pid)
-    kill_proc_and_descentents(p)
-    try:
-        sys.exit(0)
-    except SystemExit as e:
-        os._exit(0)
-
-def signal_handler(signum, frame):
-    global g_opts
-    global stop_signal
-    global workers
-    global worker_data
-    stop_signal = True
-
-    # Signal processes to complete
-    log(g_opts.logfile, "recieved signal %d, Terminating children" % signum)
-    for wd in worker_data:
-        p = wd['proc']
-        ret = p.exitcode
-        if ret is None:
-            # log(g_opts.logfile, "terminate child %d" % p.pid)
-            p.terminate()
-        else:
-            log(g_opts.logfile, "child return code was %d" % ret)
-
-    # Wait for remaining processes to complete
-    log(g_opts.logfile, "===== wait for signaled jobs to complete =====")
-    while len(worker_data) > 0:
-        log(g_opts.logfile, "    remaining workers: %d" % workers)
-        reaped = reaper(g_opts)
-        if reaped == 0:
-            time.sleep(0.1)
-
-    try:
-        sys.exit(1)
-    except SystemExit as e:
-        os._exit(1)
-
-def main(args):
-    opts, args = parse_args(args)
-    # take mock config + list of pkgs
-
-    global g_opts
-    global stop_signal
-    global build_env
-    global worker_data
-    global workers
-    global max_workers
-
-    global slow_pkg_names
-    global slow_pkgs
-    global big_pkg_names
-    global big_pkgs
-    max_workers = int(opts.max_workers)
-
-    global failed
-    global built_pkgs
-
-    cfg = opts.chroot
-    pkgs = args[1:]
-
-    # transform slow/big package options into dictionaries
-    for line in opts.slow_pkg_names_raw:
-        speed,name = line.split(":")
-        if speed != "":
-            slow_pkg_names[name]=int(speed)
-    for line in opts.slow_pkgs_raw:
-        speed,pkg = line.split(":")
-        if speed != "":
-            slow_pkgs[pkg]=int(speed)
-    for line in opts.big_pkg_names_raw:
-        size_gb,name = line.split(":")
-        if size_gb != "":
-            big_pkg_names[name]=int(size_gb)
-    for line in opts.big_pkgs_raw:
-        size_gb,pkg = line.split(":")
-        if size_gb != "":
-            big_pkgs[pkg]=int(size_gb)
-
-    # Set up a mapping between pkg path and pkg name
-    global pkg_to_name
-    global name_to_pkg
-    for pkg in pkgs:
-        if not pkg.endswith('.rpm'):
-            log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-            continue
-
-        try:
-            name = rpmName(pkg)
-        except OSError as e:
-            print("Could not parse rpm %s" % pkg)
-            sys.exit(1)
-
-        pkg_to_name[pkg] = name
-        name_to_pkg[name] = pkg
-
-    read_deps(opts)
-
-    global config_opts
-    config_opts = mockbuild.util.load_config(mockconfig_path, cfg, None, __VERSION__, PKGPYTHONDIR)
-
-    if not opts.tmp_prefix:
-        try:
-            opts.tmp_prefix = os.getlogin()
-        except OSError as e:
-            print("Could not find login name for tmp dir prefix add --tmp_prefix")
-            sys.exit(1)
-    pid = os.getpid()
-    opts.uniqueext = '%s-%s' % (opts.tmp_prefix, pid)
-
-    if opts.basedir != "/var/lib/mock":
-        opts.uniqueext = ''
-
-    # create a tempdir for our local info
-    if opts.localrepo:
-        local_tmp_dir = os.path.abspath(opts.localrepo)
-        if not os.path.exists(local_tmp_dir):
-            os.makedirs(local_tmp_dir)
-            os.chmod(local_tmp_dir, 0o755)
-    else:
-        pre = 'mock-chain-%s-' % opts.uniqueext
-        local_tmp_dir = tempfile.mkdtemp(prefix=pre, dir='/var/tmp')
-        os.chmod(local_tmp_dir, 0o755)
-
-    if opts.logfile:
-        opts.logfile = os.path.join(local_tmp_dir, opts.logfile)
-        if os.path.exists(opts.logfile):
-            os.unlink(opts.logfile)
-
-    log(opts.logfile, "starting logfile: %s" % opts.logfile)
-  
-    opts.local_repo_dir = os.path.normpath(local_tmp_dir + '/results/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.local_repo_dir):
-        os.makedirs(opts.local_repo_dir, mode=0o755)
-
-    local_baseurl = "file://%s" % opts.local_repo_dir
-    log(opts.logfile, "results dir: %s" % opts.local_repo_dir)
-    opts.config_path = os.path.normpath(local_tmp_dir + '/configs/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.config_path):
-        os.makedirs(opts.config_path, mode=0o755)
-
-    log(opts.logfile, "config dir: %s" % opts.config_path)
-
-    my_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(config_opts['chroot_name']))
-
-    # modify with localrepo
-    res, msg = add_local_repo(config_opts['config_file'], my_mock_config, local_baseurl, 'local_build_repo')
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    for baseurl in opts.repos:
-        res, msg = add_local_repo(my_mock_config, my_mock_config, baseurl)
-        if not res:
-            log(opts.logfile, "Error: Could not add: %s to yum config in mock chroot: %s" % (baseurl, msg))
-            sys.exit(1)
-
-    res, msg = set_basedir(my_mock_config, my_mock_config, opts.basedir, opts)
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    # these files needed from the mock.config dir to make mock run
-    for fn in ['site-defaults.cfg', 'logging.ini']:
-        pth = mockconfig_path + '/' + fn
-        shutil.copyfile(pth, opts.config_path + '/' + fn)
-
-    # createrepo on it
-    err = createrepo(opts.local_repo_dir)[1]
-    if err.strip():
-        log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-        log(opts.logfile, "Err: %s" % err)
-        sys.exit(1)
-
-    init_build_env(max_workers, opts, config_opts)
-
-    download_dir = tempfile.mkdtemp()
-    downloaded_pkgs = {}
-    built_pkgs = []
-    try_again = True
-    to_be_built = pkgs
-    return_code = 0
-    num_of_tries = 0
-
-    g_opts = opts
-    signal.signal(signal.SIGTERM, signal_handler)
-    signal.signal(signal.SIGINT, signal_handler)
-    signal.signal(signal.SIGHUP, signal_handler)
-    signal.signal(signal.SIGABRT, signal_handler)
-
-    while try_again and not stop_signal:
-        num_of_tries += 1
-        failed = []
-    
-        log(opts.logfile, "===== iteration %d start =====" % num_of_tries)
-
-        to_be_built_scheduled = to_be_built[:]
-
-        need_reap = False
-        while len(to_be_built_scheduled) > 0:
-            # Free up a worker
-            while need_reap or workers >= max_workers:
-                need_reap = False
-                reaped = reaper(opts)
-                if reaped == 0:
-                    time.sleep(0.1)
-                
-            if workers < max_workers:
-                workers = workers + 1
-                
-                b = get_idle_build_env(max_workers)
-                if b < 0:
-                    log(opts.logfile, "Failed to find idle build env for: %s" % pkg)
-                    workers = workers - 1
-                    need_reap = True
-                    continue
-
-                pkg = schedule(b, to_be_built_scheduled, opts)
-                if pkg is None:
-                    if workers <= 1:
-                        # Remember we have one build environmnet reserved, so can't test for zero workers
-                        log(opts.logfile, "failed to schedule from: %s" % to_be_built_scheduled)
-                        pkg = to_be_built_scheduled[0]
-                        log(opts.logfile, "All workers idle, forcing build of pkg=%s" % pkg)
-                    else:
-                        release_build_env(b)
-                        workers = workers - 1
-                        need_reap = True
-                        continue
-
-                to_be_built_scheduled.remove(pkg)
-
-                if not pkg.endswith('.rpm'):
-                    log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-                    failed.append(pkg)
-                    release_build_env(b)
-                    need_reap = True
-                    continue
-
-                elif pkg.startswith('http://') or pkg.startswith('https://') or pkg.startswith('ftp://'):
-                    url = pkg
-                    try:
-                        log(opts.logfile, 'Fetching %s' % url)
-                        r = requests.get(url)
-                        # pylint: disable=no-member
-                        if r.status_code == requests.codes.ok:
-                            fn = urlsplit(r.url).path.rsplit('/', 1)[1]
-                            if 'content-disposition' in r.headers:
-                                _, params = cgi.parse_header(r.headers['content-disposition'])
-                                if 'filename' in params and params['filename']:
-                                    fn = params['filename']
-                            pkg = download_dir + '/' + fn
-                            with open(pkg, 'wb') as fd:
-                                for chunk in r.iter_content(4096):
-                                    fd.write(chunk)
-                    except Exception as e:
-                        log(opts.logfile, 'Error Downloading %s: %s' % (url, str(e)))
-                        failed.append(url)
-                        release_build_env(b)
-                        need_reap = True
-                        continue
-                    else:
-                        downloaded_pkgs[pkg] = url
-
-                log(opts.logfile, "Start build on 'b%d': %s" % (b, pkg))
-                # ret = do_build(opts, config_opts['chroot_name'], pkg)[0]
-                p = multiprocessing.Process(target=do_build, args=(opts, build_env[b]['cfg'], pkg))
-                worker_data.append({'proc': p, 'pkg': pkg, 'build_index': int(b)})
-                p.start()
-
-        # Wait for remaining processes to complete
-        log(opts.logfile, "===== wait for last jobs in iteration %d to complete =====" % num_of_tries)
-        while workers > 0:
-            reaped = reaper(opts)
-            if reaped == 0:
-                time.sleep(0.1)
-        log(opts.logfile, "===== iteration %d complete =====" % num_of_tries)
-                
-        if failed and opts.recurse:
-            log(opts.logfile, "failed=%s" % failed)
-            log(opts.logfile, "to_be_built=%s" % to_be_built)
-            if len(failed) != len(to_be_built):
-                to_be_built = failed
-                try_again = True
-                log(opts.logfile, 'Some package succeeded, some failed.')
-                log(opts.logfile, 'Trying to rebuild %s failed pkgs, because --recurse is set.' % len(failed))
-            else:
-                if max_workers > 1:
-                    max_workers = 1
-                    to_be_built = failed
-                    try_again = True
-                    log(opts.logfile, 'Some package failed under parallel build.')
-                    log(opts.logfile, 'Trying to rebuild %s failed pkgs with single thread, because --recurse is set.' % len(failed))
-                else:
-                    log(opts.logfile, "")
-                    log(opts.logfile, "*** Build Failed ***")
-                    log(opts.logfile, "Tried %s times - following pkgs could not be successfully built:" % num_of_tries)
-                    log(opts.logfile, "*** Build Failed ***")
-                    for pkg in failed:
-                        msg = pkg
-                        if pkg in downloaded_pkgs:
-                            msg = downloaded_pkgs[pkg]
-                        log(opts.logfile, msg)
-                    log(opts.logfile, "")
-                    try_again = False
-        else:
-            try_again = False
-            if failed:
-                return_code = 2
-
-    # cleaning up our download dir
-    shutil.rmtree(download_dir, ignore_errors=True)
-
-    log(opts.logfile, "")
-    log(opts.logfile, "Results out to: %s" % opts.local_repo_dir)
-    log(opts.logfile, "")
-    log(opts.logfile, "Pkgs built: %s" % len(built_pkgs))
-    if built_pkgs:
-        if failed:
-            if len(built_pkgs):
-                log(opts.logfile, "Some packages successfully built in this order:")
-        else:
-            log(opts.logfile, "Packages successfully built in this order:")
-        for pkg in built_pkgs:
-            log(opts.logfile, pkg)
-    return return_code
-
-
-if __name__ == "__main__":
-    sys.exit(main(sys.argv))
diff --git a/build-tools/mockchain-parallel-1.4.16 b/build-tools/mockchain-parallel-1.4.16
deleted file mode 100755
index a65a4b65..00000000
--- a/build-tools/mockchain-parallel-1.4.16
+++ /dev/null
@@ -1,1226 +0,0 @@
-#!/usr/bin/python3.6 -tt
-# -*- coding: utf-8 -*-
-# vim: noai:ts=4:sw=4:expandtab
-
-# by skvidal@fedoraproject.org
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA.
-# copyright 2012 Red Hat, Inc.
-
-# SUMMARY
-# mockchain
-# take a mock config and a series of srpms
-# rebuild them one at a time
-# adding each to a local repo
-# so they are available as build deps to next pkg being built
-from __future__ import print_function
-
-import cgi
-# pylint: disable=deprecated-module
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import multiprocessing
-import signal
-import psutil
-
-import requests
-# pylint: disable=import-error
-from six.moves.urllib_parse import urlsplit
-
-import mockbuild.util
-
-from stxRpmUtils import splitRpmFilename
-
-
-# all of the variables below are substituted by the build system
-__VERSION__="1.4.16"
-SYSCONFDIR="/etc"
-PYTHONDIR="/usr/lib/python3.6/site-packages"
-PKGPYTHONDIR="/usr/lib/python3.6/site-packages/mockbuild"
-MOCKCONFDIR = os.path.join(SYSCONFDIR, "mock")
-# end build system subs
-
-mockconfig_path = '/etc/mock'
-
-def rpmName(path):
-    filename = os.path.basename(path)
-    (n, v, r, e, a) = splitRpmFilename(filename)
-    return n
-
-def createrepo(path):
-    global max_workers
-    if os.path.exists(path + '/repodata/repomd.xml'):
-        comm = ['/usr/bin/createrepo_c', '--update', '--retain-old-md', "%d" % max_workers, "--workers", "%d" % max_workers, path]
-    else:
-        comm = ['/usr/bin/createrepo_c', "--workers", "%d" % max_workers, path]
-    cmd = subprocess.Popen(
-        comm, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    return out, err
-
-
-g_opts = optparse.Values()
-
-def parse_args(args):
-    parser = optparse.OptionParser('\nmockchain -r mockcfg pkg1 [pkg2] [pkg3]')
-    parser.add_option(
-        '-r', '--root', default=None, dest='chroot',
-        metavar="CONFIG",
-        help="chroot config name/base to use in the mock build")
-    parser.add_option(
-        '-l', '--localrepo', default=None,
-        help="local path for the local repo, defaults to making its own")
-    parser.add_option(
-        '-c', '--continue', default=False, action='store_true',
-        dest='cont',
-        help="if a pkg fails to build, continue to the next one")
-    parser.add_option(
-        '-a', '--addrepo', default=[], action='append',
-        dest='repos',
-        help="add these repo baseurls to the chroot's yum config")
-    parser.add_option(
-        '--recurse', default=False, action='store_true',
-        help="if more than one pkg and it fails to build, try to build the rest and come back to it")
-    parser.add_option(
-        '--log', default=None, dest='logfile',
-        help="log to the file named by this option, defaults to not logging")
-    parser.add_option(
-        '--workers', default=1, dest='max_workers',
-        help="number of parallel build jobs")
-    parser.add_option(
-        '--worker-resources', default="", dest='worker_resources',
-        help="colon seperated list, how much mem in gb for each workers temfs")
-    parser.add_option(
-        '--basedir', default='/var/lib/mock', dest='basedir',
-        help="path to workspace")
-    parser.add_option(
-        '--tmp_prefix', default=None, dest='tmp_prefix',
-        help="tmp dir prefix - will default to username-pid if not specified")
-    parser.add_option(
-        '-m', '--mock-option', default=[], action='append',
-        dest='mock_option',
-        help="option to pass directly to mock")
-    parser.add_option(
-        '--mark-slow-name', default=[], action='append',
-        dest='slow_pkg_names_raw',
-        help="package name that is known to build slowly")
-    parser.add_option(
-        '--mark-slow-path', default=[], action='append',
-        dest='slow_pkgs_raw',
-        help="package path that is known to build slowly")
-    parser.add_option(
-        '--mark-big-name', default=[], action='append',
-        dest='big_pkg_names_raw',
-        help="package name that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--mark-big-path', default=[], action='append',
-        dest='big_pkgs_raw',
-        help="package path that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--srpm-dependency-file', default=None, 
-        dest='srpm_dependency_file',
-        help="path to srpm dependency file")
-    parser.add_option(
-        '--rpm-dependency-file', default=None, 
-        dest='rpm_dependency_file',
-        help="path to rpm dependency file")
-    parser.add_option(
-        '--rpm-to-srpm-map-file', default=None, 
-        dest='rpm_to_srpm_map_file',
-        help="path to rpm to srpm map file")
-
-    opts, args = parser.parse_args(args)
-    if opts.recurse:
-        opts.cont = True
-
-    if not opts.chroot:
-        print("You must provide an argument to -r for the mock chroot")
-        sys.exit(1)
-
-    if len(sys.argv) < 3:
-        print("You must specify at least 1 package to build")
-        sys.exit(1)
-
-    return opts, args
-
-
-REPOS_ID = []
-
-slow_pkg_names={}
-slow_pkgs={}
-big_pkg_names={}
-big_pkgs={}
-
-def generate_repo_id(baseurl):
-    """ generate repository id for yum.conf out of baseurl """
-    repoid = "/".join(baseurl.split('//')[1:]).replace('/', '_')
-    repoid = re.sub(r'[^a-zA-Z0-9_]', '', repoid)
-    suffix = ''
-    i = 1
-    while repoid + suffix in REPOS_ID:
-        suffix = str(i)
-        i += 1
-    repoid = repoid + suffix
-    REPOS_ID.append(repoid)
-    return repoid
-
-
-def set_build_idx(infile, destfile, build_idx, tmpfs_size_gb, opts):
-    # log(opts.logfile, "set_build_idx: infile=%s, destfile=%s, build_idx=%d, tmpfs_size_gb=%d" % (infile, destfile, build_idx, tmpfs_size_gb))
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['root'] = config_opts['root'].replace('b0', 'b{0}'.format(build_idx))
-        config_opts['cache_topdir'] = config_opts['cache_topdir'].replace('b0', 'b{0}'.format(build_idx))
-        # log(opts.logfile, "set_build_idx: root=%s" % config_opts['root'])
-        # log(opts.logfile, "set_build_idx: cache_topdir=%s" % config_opts['cache_topdir'])
-        if tmpfs_size_gb > 0:
-            config_opts['plugin_conf']['tmpfs_enable'] = True
-            config_opts['plugin_conf']['tmpfs_opts'] = {}
-            config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 1024
-            config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = "%dg" % tmpfs_size_gb
-            config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
-            config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = True
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_enable=%s" % config_opts['plugin_conf']['tmpfs_enable'])
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_opts->max_fs_size=%s" % config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'])
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % config_opts['cache_topdir'])
-            if not os.path.isdir(config_opts['cache_topdir']):
-                os.makedirs(config_opts['cache_topdir'], exist_ok=True)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % config_opts['cache_topdir']
-
-        cache_dir = "%s/%s/mock" % (config_opts['basedir'], config_opts['root'])
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % cache_dir)
-            if not os.path.isdir(cache_dir):
-                os.makedirs(cache_dir)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % cache_dir
-
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def set_basedir(infile, destfile, basedir, opts):
-    log(opts.logfile, "set_basedir: infile=%s, destfile=%s, basedir=%s" % (infile, destfile, basedir))
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['basedir'] = basedir
-        config_opts['resultdir'] = '{0}/result'.format(basedir)
-        config_opts['backup_base_dir'] = '{0}/backup'.format(basedir)
-        config_opts['root'] = 'mock/b0'
-        config_opts['cache_topdir'] = '{0}/cache/b0'.format(basedir)
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def add_local_repo(infile, destfile, baseurl, repoid=None):
-    """take a mock chroot config and add a repo to it's yum.conf
-       infile = mock chroot config file
-       destfile = where to save out the result
-       baseurl = baseurl of repo you wish to add"""
-    # pylint: disable=global-variable-not-assigned
-    global config_opts
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-        if not repoid:
-            repoid = generate_repo_id(baseurl)
-        else:
-            REPOS_ID.append(repoid)
-        localyumrepo = """
-[%s]
-name=%s
-baseurl=%s
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-cost=1
-best=1
-""" % (repoid, baseurl, baseurl)
-
-        config_opts['yum.conf'] += localyumrepo
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-
-def do_build(opts, cfg, pkg):
-
-    # returns 0, cmd, out, err = failure
-    # returns 1, cmd, out, err  = success
-    # returns 2, None, None, None = already built
-
-    signal.signal(signal.SIGTERM, child_signal_handler)
-    signal.signal(signal.SIGINT, child_signal_handler)
-    signal.signal(signal.SIGHUP, child_signal_handler)
-    signal.signal(signal.SIGABRT, child_signal_handler)
-    s_pkg = os.path.basename(pkg)
-    pdn = s_pkg.replace('.src.rpm', '')
-    resdir = '%s/%s' % (opts.local_repo_dir, pdn)
-    resdir = os.path.normpath(resdir)
-    if not os.path.exists(resdir):
-        os.makedirs(resdir)
-
-    success_file = resdir + '/success'
-    fail_file = resdir + '/fail'
-
-    if os.path.exists(success_file):
-        # return 2, None, None, None
-        sys.exit(2)
-
-    # clean it up if we're starting over :)
-    if os.path.exists(fail_file):
-        os.unlink(fail_file)
-
-    if opts.uniqueext == '':
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '-r', cfg, ]
-    else:
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '--uniqueext', opts.uniqueext,
-                   '-r', cfg, ]
-
-    # Ensure repo is up-to-date.
-    # Note: Merely adding --update to mockcmd failed to update
-    mockcmd_update=mockcmd
-    mockcmd_update.append('--update')
-    cmd = subprocess.Popen(
-        mockcmd_update, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode != 0:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-
-    # heuristic here, if user pass for mock "-d foo", but we must be care to leave
-    # "-d'foo bar'" or "--define='foo bar'" as is
-    compiled_re_1 = re.compile(r'^(-\S)\s+(.+)')
-    compiled_re_2 = re.compile(r'^(--[^ =])[ =](\.+)')
-    for option in opts.mock_option:
-        r_match = compiled_re_1.match(option)
-        if r_match:
-            mockcmd.extend([r_match.group(1), r_match.group(2)])
-        else:
-            r_match = compiled_re_2.match(option)
-            if r_match:
-                mockcmd.extend([r_match.group(1), r_match.group(2)])
-            else:
-                mockcmd.append(option)
-
-    print('building %s' % s_pkg)
-    mockcmd.append(pkg)
-    # print("mockcmd: %s" % str(mockcmd))
-    cmd = subprocess.Popen(
-        mockcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode == 0:
-        with open(success_file, 'w') as f:
-            f.write('done\n')
-        ret = 1
-    else:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-        with open(fail_file, 'w') as f:
-            f.write('undone\n')
-        ret = 0
-
-    # return ret, cmd, out, err
-    sys.exit(ret)
-
-
-def log(lf, msg):
-    if lf:
-        now = time.time()
-        try:
-            with open(lf, 'a') as f:
-                f.write(str(now) + ':' + msg + '\n')
-        except (IOError, OSError) as e:
-            print('Could not write to logfile %s - %s' % (lf, str(e)))
-    print(msg)
-
-
-config_opts = mockbuild.util.TemplatedDictionary()
-
-worker_data = []
-workers = 0
-max_workers = 1
-
-build_env = []
-
-failed = []
-built_pkgs = []
-
-local_repo_dir = ""
-
-pkg_to_name={}
-name_to_pkg={}
-srpm_dependencies_direct={}
-rpm_dependencies_direct={}
-rpm_to_srpm_map={}
-no_dep_list = [ "bash", "kernel" , "kernel-rt" ]
-
-
-def init_build_env(slots, opts, config_opts_in):
-    global build_env
-
-    orig_chroot_name=config_opts_in['chroot_name']
-    orig_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(orig_chroot_name))
-    # build_env.append({'state': 'Idle', 'cfg': orig_mock_config})
-    for i in range(0,slots):
-        new_chroot_name = "{0}.b{1}".format(orig_chroot_name, i)
-        new_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(new_chroot_name))
-        tmpfs_size_gb = 0
-        if opts.worker_resources == "":
-            if i > 0:
-                tmpfs_size_gb = 2 * (1 + slots - i)
-        else:
-            resource_array=opts.worker_resources.split(':')
-            if i < len(resource_array):
-                tmpfs_size_gb=int(resource_array[i])
-            else:
-                log(opts.logfile, "Error: worker-resources argument '%s' does not supply info for all %d workers" % (opts.worker_resources, slots))
-                sys.exit(1)
-        if i == 0 and tmpfs_size_gb != 0:
-            log(opts.logfile, "Error: worker-resources argument '%s' must pass '0' as first value" % (opts.worker_resources, slots))
-            sys.exit(1)
-        build_env.append({'state': 'Idle', 'cfg': new_mock_config, 'fs_size_gb': tmpfs_size_gb})
-
-        res, msg = set_build_idx(orig_mock_config, new_mock_config, i, tmpfs_size_gb, opts)
-        if not res:
-            log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-            sys.exit(1)
-
-
-idle_build_env_last_awarded = 0
-def get_idle_build_env(slots):
-    global build_env
-    global idle_build_env_last_awarded
-    visited = 0
-
-    if slots < 1:
-        return -1
-
-    i = idle_build_env_last_awarded - 1
-    if i < 0 or i >= slots:
-        i = slots - 1
-
-    while visited < slots:
-        if build_env[i]['state'] == 'Idle':
-            build_env[i]['state'] = 'Busy'
-            idle_build_env_last_awarded = i
-            return i
-        visited = visited + 1
-        i = i - 1
-        if i < 0:
-            i = slots - 1
-    return -1
-
-def release_build_env(idx):
-    global build_env
-
-    build_env[idx]['state'] = 'Idle'
-
-def get_best_rc(a, b):
-    print("get_best_rc: a=%s" % str(a))
-    print("get_best_rc: b=%s" % str(b))
-    if (b == {}) and (a != {}):
-        return a
-    if (a == {}) and (b != {}):
-        return b
-
-    if (b['build_name'] is None) and (not a['build_name'] is None):
-        return a
-    if (a['build_name'] is None) and (not b['build_name'] is None):
-        return b
-            
-    if a['unbuilt_deps'] < b['unbuilt_deps']:
-        return a
-    if b['unbuilt_deps'] < a['unbuilt_deps']:
-        return b
-
-    if a['depth'] < b['depth']:
-        return a
-    if b['depth'] < a['depth']:
-        return b
-
-    print("get_best_rc: uncertain %s vs %s" % (a,b))
-    return a
-
-unbuilt_dep_list_print=False
-def unbuilt_dep_list(name, unbuilt_pkg_names, depth, checked=None):
-    global srpm_dependencies_direct
-    global rpm_dependencies_direct
-    global rpm_to_srpm_map
-    global no_dep_list
-    global unbuilt_dep_list_print
-
-    first_iteration=False
-    unbuilt = []
-    if name in no_dep_list:
-        return unbuilt
-
-    if checked is None:
-        first_iteration=True
-        checked=[]
-
-    # Count unbuild dependencies
-    if first_iteration:
-        dependencies_direct=srpm_dependencies_direct
-    else:
-        dependencies_direct=rpm_dependencies_direct
-
-    if name in dependencies_direct:
-        for rdep in dependencies_direct[name]:
-            sdep='???'
-            if rdep in rpm_to_srpm_map:
-                sdep = rpm_to_srpm_map[rdep]
-            if rdep != name and sdep != name and not rdep in checked:
-                if (not first_iteration) and (sdep in no_dep_list):
-                    continue
-                checked.append(rdep)
-                if sdep in unbuilt_pkg_names:
-                    if not sdep in unbuilt:
-                        unbuilt.append(sdep)
-                if depth > 0:
-                    child_unbuilt = unbuilt_dep_list(rdep, unbuilt_pkg_names, depth-1, checked)
-                    for sub_sdep in child_unbuilt:
-                        if sub_sdep != name:
-                            if not sub_sdep in unbuilt:
-                                unbuilt.append(sub_sdep)
-
-    return unbuilt
-
-def can_build_at_idx(build_idx, name, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-    global build_env
-
-    fs_size_gb = 0
-    size_gb = 0
-    speed = 0
-    pkg = name_to_pkg[name]
-    if name in big_pkg_names:
-        size_gb=big_pkg_names[name]
-    if pkg in big_pkgs:
-        size_gb=big_pkgs[pkg]
-    if name in slow_pkg_names:
-        speed=slow_pkg_names[name]
-    if pkg in slow_pkgs:
-        speed=slow_pkgs[pkg]
-    fs_size_gb = build_env[build_idx]['fs_size_gb']
-    return fs_size_gb == 0 or fs_size_gb >= size_gb
-
-def schedule(build_idx, pkgs, opts):
-    global worker_data
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-
-    unbuilt_pkg_names=[]
-    building_pkg_names=[]
-    unprioritized_pkg_names=[]
-
-    for pkg in pkgs:
-        name = pkg_to_name[pkg]
-        unbuilt_pkg_names.append(name)
-        unprioritized_pkg_names.append(name)
-
-    prioritized_pkg_names=[]
-
-    for wd in worker_data:
-        pkg = wd['pkg']
-        if not pkg is None:
-            name = pkg_to_name[pkg]
-            building_pkg_names.append(name)
-
-    # log(opts.logfile, "schedule: build_idx=%d  start" % build_idx)
-    if len(big_pkg_names) or len(big_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in big_pkg_names or pkg in big_pkgs:
-                prioritized_pkg_names.append(name)
-                next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    if len(slow_pkg_names) or len(slow_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in slow_pkg_names or pkg in slow_pkgs:
-                if can_build_at_idx(build_idx, name, opts):
-                    prioritized_pkg_names.append(name)
-                    next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    for name in unprioritized_pkg_names:
-        if can_build_at_idx(build_idx, name, opts):
-            prioritized_pkg_names.append(name)
-
-    name_out = schedule2(build_idx, prioritized_pkg_names, unbuilt_pkg_names, building_pkg_names, opts)
-    if not name_out is None:
-        pkg_out = name_to_pkg[name_out]
-    else:
-        pkg_out = None
-        # log(opts.logfile, "schedule: failed to translate '%s' to a pkg" % name_out)
-    # log(opts.logfile, "schedule: build_idx=%d  end: out = %s -> %s" % (build_idx, str(name_out), str(pkg_out)))
-    return pkg_out
-
-    
-def schedule2(build_idx, pkg_names, unbuilt_pkg_names, building_pkg_names, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global no_dep_list
-
-    max_depth = 3
-
-    if len(pkg_names) == 0:
-        return None
-
-    unbuilt_deps={}
-    building_deps={}
-    for depth in range(max_depth,-1,-1):
-        unbuilt_deps[depth]={}
-        building_deps[depth]={}
-
-    for depth in range(max_depth,-1,-1):
-        checked=[]
-        reordered_pkg_names = pkg_names[:]
-        # for name in reordered_pkg_names:
-        while len(reordered_pkg_names):
-            name = reordered_pkg_names.pop(0)
-            if name in checked:
-                continue
-
-            # log(opts.logfile, "checked.append(%s)" % name)
-            checked.append(name)
-
-            pkg = name_to_pkg[name]
-            # log(opts.logfile, "schedule2: check '%s', depth %d" % (name, depth))
-            if not name in unbuilt_deps[depth]:
-                unbuilt_deps[depth][name] = unbuilt_dep_list(name, unbuilt_pkg_names, depth)
-            if not name in building_deps[depth]:
-                building_deps[depth][name] = unbuilt_dep_list(name, building_pkg_names, depth)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, depth, unbuilt_deps[depth][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, depth, building_deps[depth][name]))
-            if len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 0:
-                if can_build_at_idx(build_idx, name, opts):
-                    log(opts.logfile, "schedule2: no unbuilt deps for '%s', searching at depth %d" % (name, depth))
-                    return name
-                else:
-                    # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                    continue
-
-            if not name in unbuilt_deps[0]:
-                unbuilt_deps[0][name] = unbuilt_dep_list(name, unbuilt_pkg_names, 0)
-            if not name in building_deps[0]:
-                building_deps[0][name] = unbuilt_dep_list(name, building_pkg_names, 0)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, 0, unbuilt_deps[0][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, 0, building_deps[0][name]))
-            if (len(building_deps[depth][name]) == 0 and len(unbuilt_deps[depth][name]) == 1 and unbuilt_deps[depth][name][0] in no_dep_list) or (len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 1 and building_deps[depth][name][0] in no_dep_list):
-                if len(unbuilt_deps[0][name]) == 0 and len(building_deps[0][name]) == 0:
-                    if can_build_at_idx(build_idx, name, opts):
-                        log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, searching at depth %d" % (name, depth))
-                        return name
-                    else:
-                        # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                        continue
-
-            loop = False
-            for dep_name in unbuilt_deps[depth][name]:
-                if name == dep_name:
-                    continue
-
-                # log(opts.logfile, "name=%s depends on dep_name=%s, depth=%d" % (name, dep_name, depth))
-                if dep_name in checked:
-                    continue
-
-                # log(opts.logfile, "schedule2: check '%s' indirect" % dep_name)
-                if not dep_name in unbuilt_deps[depth]:
-                    unbuilt_deps[depth][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, depth)
-                if not dep_name in building_deps[depth]:
-                    building_deps[depth][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, depth)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, unbuilt_deps[depth][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, building_deps[depth][dep_name]))
-                if len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 0:
-                    if can_build_at_idx(build_idx, dep_name, opts):
-                        log(opts.logfile, "schedule2: deps: no unbuilt deps for '%s', working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                        return dep_name
-
-                if not dep_name in unbuilt_deps[0]:
-                    unbuilt_deps[0][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, 0)
-                if not dep_name in building_deps[0]:
-                    building_deps[0][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, 0)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, unbuilt_deps[0][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, building_deps[0][dep_name]))
-                if (len(building_deps[depth][dep_name]) == 0 and len(unbuilt_deps[depth][dep_name]) == 1 and unbuilt_deps[depth][dep_name][0] in no_dep_list) or (len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 1 and building_deps[depth][dep_name][0] in no_dep_list):
-                    if len(unbuilt_deps[0][dep_name]) == 0 and len(building_deps[0][dep_name]) == 0:
-                        if can_build_at_idx(build_idx, dep_name, opts):
-                            log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                            return dep_name
-
-                if name in unbuilt_deps[0][dep_name]:
-                    loop = True
-                    # log(opts.logfile, "schedule2: loop detected: %s <-> %s" % (name, dep_name))
-
-            if loop and len(building_deps[depth][name]) == 0:
-                log(opts.logfile, "schedule2: loop detected, try to build '%s'" % name)
-                return name
-
-            for dep_name in unbuilt_deps[depth][name]:
-                if dep_name in reordered_pkg_names:
-                    # log(opts.logfile, "schedule2: promote %s to work toward %s" % (dep_name, name))
-                    reordered_pkg_names.remove(dep_name)
-                    reordered_pkg_names.insert(0,dep_name)
-
-    # log(opts.logfile, "schedule2: Nothing buildable at this time")
-    return None
-
-
-    
-
-def read_deps(opts):
-    read_srpm_deps(opts)
-    read_rpm_deps(opts)
-    read_map_deps(opts)
-
-def read_srpm_deps(opts):
-    global srpm_dependencies_direct
-
-    if opts.srpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.srpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.srpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.srpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            srpm_dependencies_direct[name]=deps.split(',')
-
-def read_rpm_deps(opts):
-    global rpm_dependencies_direct
-
-    if opts.rpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.rpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            rpm_dependencies_direct[name]=deps.split(',')
-
-def read_map_deps(opts):
-    global rpm_to_srpm_map
-
-    if opts.rpm_to_srpm_map_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_to_srpm_map_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_to_srpm_map_file)
-        sys.exit(1) 
-
-    with open(opts.rpm_to_srpm_map_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (rpm,srpm) = line.rstrip().split(';')
-            rpm_to_srpm_map[rpm]=srpm
-
-
-def reaper(opts):
-    global built_pkgs
-    global failed
-    global worker_data
-    global workers
-
-    reaped = 0
-    need_createrepo = False
-    last_reaped = -1
-    while reaped > last_reaped:
-        last_reaped = reaped
-        for wd in worker_data:
-            p = wd['proc']
-            ret = p.exitcode
-            if ret is not None:
-                pkg = wd['pkg']
-                b = int(wd['build_index'])
-                p.join()
-                worker_data.remove(wd)
-                workers = workers - 1
-                reaped = reaped + 1
-                release_build_env(b)
-
-                log(opts.logfile, "End build on 'b%d': %s" % (b, pkg))
-
-                if ret == 0:
-                    failed.append(pkg)
-                    log(opts.logfile, "Error building %s on 'b%d'." % (os.path.basename(pkg), b))
-                    if opts.recurse and not stop_signal:
-                        log(opts.logfile, "Will try to build again (if some other package will succeed).")
-                    else:
-                        log(opts.logfile, "See logs/results in %s" % opts.local_repo_dir)
-                        if not opts.cont:
-                            sys.exit(1)
-                elif ret == 1:
-                    log(opts.logfile, "Success building %s on 'b%d'" % (os.path.basename(pkg), b))
-                    built_pkgs.append(pkg)
-                    need_createrepo = True
-                elif ret == 2:
-                    log(opts.logfile, "Skipping already built pkg %s" % os.path.basename(pkg))
-
-    if need_createrepo:
-        # createrepo with the new pkgs
-        err = createrepo(opts.local_repo_dir)[1]
-        if err.strip():
-            log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-            log(opts.logfile, "Err: %s" % err)
-
-    return reaped
-
-stop_signal = False
-
-def on_terminate(proc):
-    print("process {} terminated with exit code {}".format(proc, proc.returncode))
-
-def kill_proc_and_descentents(parent, need_stop=False, verbose=False):
-    global g_opts
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Stop %d" % parent.pid)
-
-        try:
-            parent.send_signal(signal.SIGSTOP)
-        except:
-            # perhaps mock still running as root, give it a sec to drop pivledges and try again
-            time.sleep(1)
-            parent.send_signal(signal.SIGSTOP)
-
-    children = parent.children(recursive=False)
-
-    for p in children:
-        kill_proc_and_descentents(p, need_stop=True, verbose=verbose)
-
-    if verbose:
-        log(g_opts.logfile, "Terminate %d" % parent.pid)
-
-    # parent.send_signal(signal.SIGTERM)
-    try:
-        parent.terminate()
-    except:
-        # perhaps mock still running as root, give it a sec to drop pivledges and try again
-        time.sleep(1)
-        parent.terminate()
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Continue %d" % parent.pid)
-
-        parent.send_signal(signal.SIGCONT)
-
-
-def child_signal_handler(signum, frame):
-    global g_opts
-    my_pid = os.getpid()
-    # log(g_opts.logfile, "--------- child %d recieved signal %d" % (my_pid, signum))
-    p = psutil.Process(my_pid)
-    kill_proc_and_descentents(p)
-    try:
-        sys.exit(0)
-    except SystemExit as e:
-        os._exit(0)
-
-def signal_handler(signum, frame):
-    global g_opts
-    global stop_signal
-    global workers
-    global worker_data
-    stop_signal = True
-
-    # Signal processes to complete
-    log(g_opts.logfile, "recieved signal %d, Terminating children" % signum)
-    for wd in worker_data:
-        p = wd['proc']
-        ret = p.exitcode
-        if ret is None:
-            # log(g_opts.logfile, "terminate child %d" % p.pid)
-            p.terminate()
-        else:
-            log(g_opts.logfile, "child return code was %d" % ret)
-
-    # Wait for remaining processes to complete
-    log(g_opts.logfile, "===== wait for signaled jobs to complete =====")
-    while len(worker_data) > 0:
-        log(g_opts.logfile, "    remaining workers: %d" % workers)
-        reaped = reaper(g_opts)
-        if reaped == 0:
-            time.sleep(0.1)
-
-    try:
-        sys.exit(1)
-    except SystemExit as e:
-        os._exit(1)
-
-def main(args):
-    opts, args = parse_args(args)
-    # take mock config + list of pkgs
-
-    global g_opts
-    global stop_signal
-    global build_env
-    global worker_data
-    global workers
-    global max_workers
-
-    global slow_pkg_names
-    global slow_pkgs
-    global big_pkg_names
-    global big_pkgs
-    max_workers = int(opts.max_workers)
-
-    global failed
-    global built_pkgs
-
-    cfg = opts.chroot
-    pkgs = args[1:]
-
-    # transform slow/big package options into dictionaries
-    for line in opts.slow_pkg_names_raw:
-        speed,name = line.split(":")
-        if speed != "":
-            slow_pkg_names[name]=int(speed)
-    for line in opts.slow_pkgs_raw:
-        speed,pkg = line.split(":")
-        if speed != "":
-            slow_pkgs[pkg]=int(speed)
-    for line in opts.big_pkg_names_raw:
-        size_gb,name = line.split(":")
-        if size_gb != "":
-            big_pkg_names[name]=int(size_gb)
-    for line in opts.big_pkgs_raw:
-        size_gb,pkg = line.split(":")
-        if size_gb != "":
-            big_pkgs[pkg]=int(size_gb)
-
-    # Set up a mapping between pkg path and pkg name
-    global pkg_to_name
-    global name_to_pkg
-    for pkg in pkgs:
-        if not pkg.endswith('.rpm'):
-            log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-            continue
-
-        try:
-            name = rpmName(pkg)
-        except OSError as e:
-            print("Could not parse rpm %s" % pkg)
-            sys.exit(1)
-
-        pkg_to_name[pkg] = name
-        name_to_pkg[name] = pkg
-
-    read_deps(opts)
-
-    global config_opts
-    config_opts = mockbuild.util.load_config(mockconfig_path, cfg, None, __VERSION__, PKGPYTHONDIR)
-
-    if not opts.tmp_prefix:
-        try:
-            opts.tmp_prefix = os.getlogin()
-        except OSError as e:
-            print("Could not find login name for tmp dir prefix add --tmp_prefix")
-            sys.exit(1)
-    pid = os.getpid()
-    opts.uniqueext = '%s-%s' % (opts.tmp_prefix, pid)
-
-    if opts.basedir != "/var/lib/mock":
-        opts.uniqueext = ''
-
-    # create a tempdir for our local info
-    if opts.localrepo:
-        local_tmp_dir = os.path.abspath(opts.localrepo)
-        if not os.path.exists(local_tmp_dir):
-            os.makedirs(local_tmp_dir)
-            os.chmod(local_tmp_dir, 0o755)
-    else:
-        pre = 'mock-chain-%s-' % opts.uniqueext
-        local_tmp_dir = tempfile.mkdtemp(prefix=pre, dir='/var/tmp')
-        os.chmod(local_tmp_dir, 0o755)
-
-    if opts.logfile:
-        opts.logfile = os.path.join(local_tmp_dir, opts.logfile)
-        if os.path.exists(opts.logfile):
-            os.unlink(opts.logfile)
-
-    log(opts.logfile, "starting logfile: %s" % opts.logfile)
-  
-    opts.local_repo_dir = os.path.normpath(local_tmp_dir + '/results/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.local_repo_dir):
-        os.makedirs(opts.local_repo_dir, mode=0o755)
-
-    local_baseurl = "file://%s" % opts.local_repo_dir
-    log(opts.logfile, "results dir: %s" % opts.local_repo_dir)
-    opts.config_path = os.path.normpath(local_tmp_dir + '/configs/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.config_path):
-        os.makedirs(opts.config_path, mode=0o755)
-
-    log(opts.logfile, "config dir: %s" % opts.config_path)
-
-    my_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(config_opts['chroot_name']))
-
-    # modify with localrepo
-    res, msg = add_local_repo(config_opts['config_file'], my_mock_config, local_baseurl, 'local_build_repo')
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    for baseurl in opts.repos:
-        res, msg = add_local_repo(my_mock_config, my_mock_config, baseurl)
-        if not res:
-            log(opts.logfile, "Error: Could not add: %s to yum config in mock chroot: %s" % (baseurl, msg))
-            sys.exit(1)
-
-    res, msg = set_basedir(my_mock_config, my_mock_config, opts.basedir, opts)
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    # these files needed from the mock.config dir to make mock run
-    for fn in ['site-defaults.cfg', 'logging.ini']:
-        pth = mockconfig_path + '/' + fn
-        shutil.copyfile(pth, opts.config_path + '/' + fn)
-
-    # createrepo on it
-    err = createrepo(opts.local_repo_dir)[1]
-    if err.strip():
-        log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-        log(opts.logfile, "Err: %s" % err)
-        # Temporary disable
-        # https://github.com/rpm-software-management/mock/issues/249
-        #sys.exit(1)
-
-
-    init_build_env(max_workers, opts, config_opts)
-
-    download_dir = tempfile.mkdtemp()
-    downloaded_pkgs = {}
-    built_pkgs = []
-    try_again = True
-    to_be_built = pkgs
-    return_code = 0
-    num_of_tries = 0
-
-    g_opts = opts
-    signal.signal(signal.SIGTERM, signal_handler)
-    signal.signal(signal.SIGINT, signal_handler)
-    signal.signal(signal.SIGHUP, signal_handler)
-    signal.signal(signal.SIGABRT, signal_handler)
-
-    while try_again and not stop_signal:
-        num_of_tries += 1
-        failed = []
-    
-        log(opts.logfile, "===== iteration %d start =====" % num_of_tries)
-
-        to_be_built_scheduled = to_be_built[:]
-
-        need_reap = False
-        while len(to_be_built_scheduled) > 0:
-            # Free up a worker
-            while need_reap or workers >= max_workers:
-                need_reap = False
-                reaped = reaper(opts)
-                if reaped == 0:
-                    time.sleep(0.1)
-                
-            if workers < max_workers:
-                workers = workers + 1
-                
-                b = get_idle_build_env(max_workers)
-                if b < 0:
-                    log(opts.logfile, "Failed to find idle build env for: %s" % pkg)
-                    workers = workers - 1
-                    need_reap = True
-                    continue
-
-                pkg = schedule(b, to_be_built_scheduled, opts)
-                if pkg is None:
-                    if workers <= 1:
-                        # Remember we have one build environmnet reserved, so can't test for zero workers
-                        log(opts.logfile, "failed to schedule from: %s" % to_be_built_scheduled)
-                        pkg = to_be_built_scheduled[0]
-                        log(opts.logfile, "All workers idle, forcing build of pkg=%s" % pkg)
-                    else:
-                        release_build_env(b)
-                        workers = workers - 1
-                        need_reap = True
-                        continue
-
-                to_be_built_scheduled.remove(pkg)
-
-                if not pkg.endswith('.rpm'):
-                    log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-                    failed.append(pkg)
-                    release_build_env(b)
-                    need_reap = True
-                    continue
-
-                elif pkg.startswith('http://') or pkg.startswith('https://') or pkg.startswith('ftp://'):
-                    url = pkg
-                    try:
-                        log(opts.logfile, 'Fetching %s' % url)
-                        r = requests.get(url)
-                        # pylint: disable=no-member
-                        if r.status_code == requests.codes.ok:
-                            fn = urlsplit(r.url).path.rsplit('/', 1)[1]
-                            if 'content-disposition' in r.headers:
-                                _, params = cgi.parse_header(r.headers['content-disposition'])
-                                if 'filename' in params and params['filename']:
-                                    fn = params['filename']
-                            pkg = download_dir + '/' + fn
-                            with open(pkg, 'wb') as fd:
-                                for chunk in r.iter_content(4096):
-                                    fd.write(chunk)
-                    except Exception as e:
-                        log(opts.logfile, 'Error Downloading %s: %s' % (url, str(e)))
-                        failed.append(url)
-                        release_build_env(b)
-                        need_reap = True
-                        continue
-                    else:
-                        downloaded_pkgs[pkg] = url
-
-                log(opts.logfile, "Start build on 'b%d': %s" % (b, pkg))
-                # ret = do_build(opts, config_opts['chroot_name'], pkg)[0]
-                p = multiprocessing.Process(target=do_build, args=(opts, build_env[b]['cfg'], pkg))
-                worker_data.append({'proc': p, 'pkg': pkg, 'build_index': int(b)})
-                p.start()
-
-        # Wait for remaining processes to complete
-        log(opts.logfile, "===== wait for last jobs in iteration %d to complete =====" % num_of_tries)
-        while workers > 0:
-            reaped = reaper(opts)
-            if reaped == 0:
-                time.sleep(0.1)
-        log(opts.logfile, "===== iteration %d complete =====" % num_of_tries)
-                
-        if failed and opts.recurse:
-            log(opts.logfile, "failed=%s" % failed)
-            log(opts.logfile, "to_be_built=%s" % to_be_built)
-            if len(failed) != len(to_be_built):
-                to_be_built = failed
-                try_again = True
-                log(opts.logfile, 'Some package succeeded, some failed.')
-                log(opts.logfile, 'Trying to rebuild %s failed pkgs, because --recurse is set.' % len(failed))
-            else:
-                if max_workers > 1:
-                    max_workers = 1
-                    to_be_built = failed
-                    try_again = True
-                    log(opts.logfile, 'Some package failed under parallel build.')
-                    log(opts.logfile, 'Trying to rebuild %s failed pkgs with single thread, because --recurse is set.' % len(failed))
-                else:
-                    log(opts.logfile, "")
-                    log(opts.logfile, "*** Build Failed ***")
-                    log(opts.logfile, "Tried %s times - following pkgs could not be successfully built:" % num_of_tries)
-                    log(opts.logfile, "*** Build Failed ***")
-                    for pkg in failed:
-                        msg = pkg
-                        if pkg in downloaded_pkgs:
-                            msg = downloaded_pkgs[pkg]
-                        log(opts.logfile, msg)
-                    log(opts.logfile, "")
-                    try_again = False
-        else:
-            try_again = False
-            if failed:
-                return_code = 2
-
-    # cleaning up our download dir
-    shutil.rmtree(download_dir, ignore_errors=True)
-
-    log(opts.logfile, "")
-    log(opts.logfile, "Results out to: %s" % opts.local_repo_dir)
-    log(opts.logfile, "")
-    log(opts.logfile, "Pkgs built: %s" % len(built_pkgs))
-    if built_pkgs:
-        if failed:
-            if len(built_pkgs):
-                log(opts.logfile, "Some packages successfully built in this order:")
-        else:
-            log(opts.logfile, "Packages successfully built in this order:")
-        for pkg in built_pkgs:
-            log(opts.logfile, pkg)
-    return return_code
-
-
-if __name__ == "__main__":
-    sys.exit(main(sys.argv))
diff --git a/build-tools/mockchain-parallel-2.6 b/build-tools/mockchain-parallel-2.6
deleted file mode 100755
index c159d9bf..00000000
--- a/build-tools/mockchain-parallel-2.6
+++ /dev/null
@@ -1,1221 +0,0 @@
-#!/usr/bin/python3 -tt
-# -*- coding: utf-8 -*-
-# vim: noai:ts=4:sw=4:expandtab
-
-# by skvidal@fedoraproject.org
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA.
-# copyright 2012 Red Hat, Inc.
-
-# SUMMARY
-# mockchain
-# take a mock config and a series of srpms
-# rebuild them one at a time
-# adding each to a local repo
-# so they are available as build deps to next pkg being built
-from __future__ import print_function
-
-import cgi
-# pylint: disable=deprecated-module
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import multiprocessing
-import signal
-import psutil
-
-import requests
-# pylint: disable=import-error
-from six.moves.urllib_parse import urlsplit
-
-import mockbuild.util
-
-from stxRpmUtils import splitRpmFilename
-
-# all of the variables below are substituted by the build system
-__VERSION__="2.6"
-SYSCONFDIR="/etc"
-PYTHONDIR="/usr/lib/python3.6/site-packages"
-PKGPYTHONDIR="/usr/lib/python3.6/site-packages/mockbuild"
-MOCKCONFDIR = os.path.join(SYSCONFDIR, "mock")
-# end build system subs
-
-mockconfig_path = '/etc/mock'
-
-def rpmName(path):
-    filename = os.path.basename(path)
-    (n, v, r, e, a) = splitRpmFilename(filename)
-    return n
-
-def createrepo(path):
-    global max_workers
-    if os.path.exists(path + '/repodata/repomd.xml'):
-        comm = ['/usr/bin/createrepo_c', '--update', '--retain-old-md', "%d" % max_workers, "--workers", "%d" % max_workers, path]
-    else:
-        comm = ['/usr/bin/createrepo_c', "--workers", "%d" % max_workers, path]
-    cmd = subprocess.Popen(
-        comm, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    return out, err
-
-
-g_opts = optparse.Values()
-
-def parse_args(args):
-    parser = optparse.OptionParser('\nmockchain -r mockcfg pkg1 [pkg2] [pkg3]')
-    parser.add_option(
-        '-r', '--root', default=None, dest='chroot',
-        metavar="CONFIG",
-        help="chroot config name/base to use in the mock build")
-    parser.add_option(
-        '-l', '--localrepo', default=None,
-        help="local path for the local repo, defaults to making its own")
-    parser.add_option(
-        '-c', '--continue', default=False, action='store_true',
-        dest='cont',
-        help="if a pkg fails to build, continue to the next one")
-    parser.add_option(
-        '-a', '--addrepo', default=[], action='append',
-        dest='repos',
-        help="add these repo baseurls to the chroot's yum config")
-    parser.add_option(
-        '--recurse', default=False, action='store_true',
-        help="if more than one pkg and it fails to build, try to build the rest and come back to it")
-    parser.add_option(
-        '--log', default=None, dest='logfile',
-        help="log to the file named by this option, defaults to not logging")
-    parser.add_option(
-        '--workers', default=1, dest='max_workers',
-        help="number of parallel build jobs")
-    parser.add_option(
-        '--worker-resources', default="", dest='worker_resources',
-        help="colon seperated list, how much mem in gb for each workers temfs")
-    parser.add_option(
-        '--basedir', default='/var/lib/mock', dest='basedir',
-        help="path to workspace")
-    parser.add_option(
-        '--tmp_prefix', default=None, dest='tmp_prefix',
-        help="tmp dir prefix - will default to username-pid if not specified")
-    parser.add_option(
-        '-m', '--mock-option', default=[], action='append',
-        dest='mock_option',
-        help="option to pass directly to mock")
-    parser.add_option(
-        '--mark-slow-name', default=[], action='append',
-        dest='slow_pkg_names_raw',
-        help="package name that is known to build slowly")
-    parser.add_option(
-        '--mark-slow-path', default=[], action='append',
-        dest='slow_pkgs_raw',
-        help="package path that is known to build slowly")
-    parser.add_option(
-        '--mark-big-name', default=[], action='append',
-        dest='big_pkg_names_raw',
-        help="package name that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--mark-big-path', default=[], action='append',
-        dest='big_pkgs_raw',
-        help="package path that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--srpm-dependency-file', default=None,
-        dest='srpm_dependency_file',
-        help="path to srpm dependency file")
-    parser.add_option(
-        '--rpm-dependency-file', default=None,
-        dest='rpm_dependency_file',
-        help="path to rpm dependency file")
-    parser.add_option(
-        '--rpm-to-srpm-map-file', default=None,
-        dest='rpm_to_srpm_map_file',
-        help="path to rpm to srpm map file")
-
-    opts, args = parser.parse_args(args)
-    if opts.recurse:
-        opts.cont = True
-
-    if not opts.chroot:
-        print("You must provide an argument to -r for the mock chroot")
-        sys.exit(1)
-
-    if len(sys.argv) < 3:
-        print("You must specify at least 1 package to build")
-        sys.exit(1)
-
-    return opts, args
-
-
-REPOS_ID = []
-
-slow_pkg_names={}
-slow_pkgs={}
-big_pkg_names={}
-big_pkgs={}
-
-def generate_repo_id(baseurl):
-    """ generate repository id for yum.conf out of baseurl """
-    repoid = "/".join(baseurl.split('//')[1:]).replace('/', '_')
-    repoid = re.sub(r'[^a-zA-Z0-9_]', '', repoid)
-    suffix = ''
-    i = 1
-    while repoid + suffix in REPOS_ID:
-        suffix = str(i)
-        i += 1
-    repoid = repoid + suffix
-    REPOS_ID.append(repoid)
-    return repoid
-
-
-def set_build_idx(infile, destfile, build_idx, tmpfs_size_gb, opts):
-    # log(opts.logfile, "set_build_idx: infile=%s, destfile=%s, build_idx=%d, tmpfs_size_gb=%d" % (infile, destfile, build_idx, tmpfs_size_gb))
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['root'] = config_opts['root'].replace('b0', 'b{0}'.format(build_idx))
-        config_opts['rootdir'] = config_opts['rootdir'].replace('b0', 'b{0}'.format(build_idx))
-        config_opts['cache_topdir'] = config_opts['cache_topdir'].replace('b0', 'b{0}'.format(build_idx))
-        # log(opts.logfile, "set_build_idx: root=%s" % config_opts['root'])
-        # log(opts.logfile, "set_build_idx: cache_topdir=%s" % config_opts['cache_topdir'])
-        if tmpfs_size_gb > 0:
-            config_opts['plugin_conf']['tmpfs_enable'] = True
-            config_opts['plugin_conf']['tmpfs_opts'] = {}
-            config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 1024
-            config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = "%dg" % tmpfs_size_gb
-            config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
-            config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = True
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_enable=%s" % config_opts['plugin_conf']['tmpfs_enable'])
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_opts->max_fs_size=%s" % config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'])
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % config_opts['cache_topdir'])
-            if not os.path.isdir(config_opts['cache_topdir']):
-                os.makedirs(config_opts['cache_topdir'], exist_ok=True)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % config_opts['cache_topdir']
-
-        cache_dir = "%s/%s/mock" % (config_opts['basedir'], config_opts['root'])
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % cache_dir)
-            if not os.path.isdir(cache_dir):
-                os.makedirs(cache_dir)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % cache_dir
-
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def set_basedir(infile, destfile, basedir, opts):
-    log(opts.logfile, "set_basedir: infile=%s, destfile=%s, basedir=%s" % (infile, destfile, basedir))
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['basedir'] = basedir
-        config_opts['resultdir'] = '{0}/result'.format(basedir)
-        config_opts['backup_base_dir'] = '{0}/backup'.format(basedir)
-        config_opts['root'] = 'mock/b0'
-        config_opts['cache_topdir'] = '{0}/cache/b0'.format(basedir)
-        config_opts['rootdir'] = '{0}/mock/b0/root'.format(basedir)
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def add_local_repo(infile, destfile, baseurl, repoid=None):
-    """take a mock chroot config and add a repo to it's yum.conf
-       infile = mock chroot config file
-       destfile = where to save out the result
-       baseurl = baseurl of repo you wish to add"""
-    global config_opts
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-        if not repoid:
-            repoid = generate_repo_id(baseurl)
-        else:
-            REPOS_ID.append(repoid)
-        localyumrepo = """
-[%s]
-name=%s
-baseurl=%s
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-cost=1
-best=1
-""" % (repoid, baseurl, baseurl)
-
-        config_opts['yum.conf'] += localyumrepo
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-
-def do_build(opts, cfg, pkg):
-
-    # returns 0, cmd, out, err = failure
-    # returns 1, cmd, out, err  = success
-    # returns 2, None, None, None = already built
-
-    signal.signal(signal.SIGTERM, child_signal_handler)
-    signal.signal(signal.SIGINT, child_signal_handler)
-    signal.signal(signal.SIGHUP, child_signal_handler)
-    signal.signal(signal.SIGABRT, child_signal_handler)
-    s_pkg = os.path.basename(pkg)
-    pdn = s_pkg.replace('.src.rpm', '')
-    resdir = '%s/%s' % (opts.local_repo_dir, pdn)
-    resdir = os.path.normpath(resdir)
-    if not os.path.exists(resdir):
-        os.makedirs(resdir)
-
-    success_file = resdir + '/success'
-    fail_file = resdir + '/fail'
-
-    if os.path.exists(success_file):
-        # return 2, None, None, None
-        sys.exit(2)
-
-    # clean it up if we're starting over :)
-    if os.path.exists(fail_file):
-        os.unlink(fail_file)
-
-    if opts.uniqueext == '':
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '--root', cfg, ]
-    else:
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '--uniqueext', opts.uniqueext,
-                   '--root', cfg, ]
-
-    # Ensure repo is up-to-date.
-    # Note: Merely adding --update to mockcmd failed to update
-    mockcmd_update=mockcmd
-    mockcmd_update.append('--update')
-    cmd = subprocess.Popen(
-        mockcmd_update, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode != 0:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-
-    # heuristic here, if user pass for mock "-d foo", but we must be care to leave
-    # "-d'foo bar'" or "--define='foo bar'" as is
-    compiled_re_1 = re.compile(r'^(-\S)\s+(.+)')
-    compiled_re_2 = re.compile(r'^(--[^ =])[ =](\.+)')
-    for option in opts.mock_option:
-        r_match = compiled_re_1.match(option)
-        if r_match:
-            mockcmd.extend([r_match.group(1), r_match.group(2)])
-        else:
-            r_match = compiled_re_2.match(option)
-            if r_match:
-                mockcmd.extend([r_match.group(1), r_match.group(2)])
-            else:
-                mockcmd.append(option)
-
-    print('building %s' % s_pkg)
-    mockcmd.append(pkg)
-    cmd = subprocess.Popen(
-        mockcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode == 0:
-        with open(success_file, 'w') as f:
-            f.write('done\n')
-        ret = 1
-    else:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-        with open(fail_file, 'w') as f:
-            f.write('undone\n')
-        ret = 0
-
-    # return ret, cmd, out, err
-    sys.exit(ret)
-
-
-def log(lf, msg):
-    if lf:
-        now = time.time()
-        try:
-            with open(lf, 'a') as f:
-                f.write(str(now) + ':' + msg + '\n')
-        except (IOError, OSError) as e:
-            print('Could not write to logfile %s - %s' % (lf, str(e)))
-    print(msg)
-
-
-config_opts = {}
-
-worker_data = []
-workers = 0
-max_workers = 1
-
-build_env = []
-
-failed = []
-built_pkgs = []
-
-local_repo_dir = ""
-
-pkg_to_name={}
-name_to_pkg={}
-srpm_dependencies_direct={}
-rpm_dependencies_direct={}
-rpm_to_srpm_map={}
-no_dep_list = [ "bash", "kernel" , "kernel-rt" ]
-
-
-def init_build_env(slots, opts, config_opts_in):
-    global build_env
-
-    orig_chroot_name=config_opts_in['chroot_name']
-    orig_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(orig_chroot_name))
-    # build_env.append({'state': 'Idle', 'cfg': orig_mock_config})
-    for i in range(0,slots):
-        new_chroot_name = "{0}.b{1}".format(orig_chroot_name, i)
-        new_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(new_chroot_name))
-        tmpfs_size_gb = 0
-        if opts.worker_resources == "":
-            if i > 0:
-                tmpfs_size_gb = 2 * (1 + slots - i)
-        else:
-            resource_array=opts.worker_resources.split(':')
-            if i < len(resource_array):
-                tmpfs_size_gb=int(resource_array[i])
-            else:
-                log(opts.logfile, "Error: worker-resources argument '%s' does not supply info for all %d workers" % (opts.worker_resources, slots))
-                sys.exit(1)
-        if i == 0 and tmpfs_size_gb != 0:
-            log(opts.logfile, "Error: worker-resources argument '%s' must pass '0' as first value" % (opts.worker_resources, slots))
-            sys.exit(1)
-        build_env.append({'state': 'Idle', 'cfg': new_mock_config, 'fs_size_gb': tmpfs_size_gb})
-
-        res, msg = set_build_idx(orig_mock_config, new_mock_config, i, tmpfs_size_gb, opts)
-        if not res:
-            log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-            sys.exit(1)
-
-
-idle_build_env_last_awarded = 0
-def get_idle_build_env(slots):
-    global build_env
-    global idle_build_env_last_awarded
-    visited = 0
-
-    if slots < 1:
-        return -1
-
-    i = idle_build_env_last_awarded - 1
-    if i < 0 or i >= slots:
-        i = slots - 1
-
-    while visited < slots:
-        if build_env[i]['state'] == 'Idle':
-            build_env[i]['state'] = 'Busy'
-            idle_build_env_last_awarded = i
-            return i
-        visited = visited + 1
-        i = i - 1
-        if i < 0:
-            i = slots - 1
-    return -1
-
-def release_build_env(idx):
-    global build_env
-
-    build_env[idx]['state'] = 'Idle'
-
-def get_best_rc(a, b):
-    print("get_best_rc: a=%s" % str(a))
-    print("get_best_rc: b=%s" % str(b))
-    if (b == {}) and (a != {}):
-        return a
-    if (a == {}) and (b != {}):
-        return b
-
-    if (b['build_name'] is None) and (not a['build_name'] is None):
-        return a
-    if (a['build_name'] is None) and (not b['build_name'] is None):
-        return b
-
-    if a['unbuilt_deps'] < b['unbuilt_deps']:
-        return a
-    if b['unbuilt_deps'] < a['unbuilt_deps']:
-        return b
-
-    if a['depth'] < b['depth']:
-        return a
-    if b['depth'] < a['depth']:
-        return b
-
-    print("get_best_rc: uncertain %s vs %s" % (a,b))
-    return a
-
-unbuilt_dep_list_print=False
-def unbuilt_dep_list(name, unbuilt_pkg_names, depth, checked=None):
-    global srpm_dependencies_direct
-    global rpm_dependencies_direct
-    global rpm_to_srpm_map
-    global no_dep_list
-    global unbuilt_dep_list_print
-
-    first_iteration=False
-    unbuilt = []
-    if name in no_dep_list:
-        return unbuilt
-
-    if checked is None:
-        first_iteration=True
-        checked=[]
-
-    # Count unbuild dependencies
-    if first_iteration:
-        dependencies_direct=srpm_dependencies_direct
-    else:
-        dependencies_direct=rpm_dependencies_direct
-
-    if name in dependencies_direct:
-        for rdep in dependencies_direct[name]:
-            sdep='???'
-            if rdep in rpm_to_srpm_map:
-                sdep = rpm_to_srpm_map[rdep]
-            if rdep != name and sdep != name and not rdep in checked:
-                if (not first_iteration) and (sdep in no_dep_list):
-                    continue
-                checked.append(rdep)
-                if sdep in unbuilt_pkg_names:
-                    if not sdep in unbuilt:
-                        unbuilt.append(sdep)
-                if depth > 0:
-                    child_unbuilt = unbuilt_dep_list(rdep, unbuilt_pkg_names, depth-1, checked)
-                    for sub_sdep in child_unbuilt:
-                        if sub_sdep != name:
-                            if not sub_sdep in unbuilt:
-                                unbuilt.append(sub_sdep)
-
-    return unbuilt
-
-def can_build_at_idx(build_idx, name, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-    global build_env
-
-    fs_size_gb = 0
-    size_gb = 0
-    speed = 0
-    pkg = name_to_pkg[name]
-    if name in big_pkg_names:
-        size_gb=big_pkg_names[name]
-    if pkg in big_pkgs:
-        size_gb=big_pkgs[pkg]
-    if name in slow_pkg_names:
-        speed=slow_pkg_names[name]
-    if pkg in slow_pkgs:
-        speed=slow_pkgs[pkg]
-    fs_size_gb = build_env[build_idx]['fs_size_gb']
-    return fs_size_gb == 0 or fs_size_gb >= size_gb
-
-def schedule(build_idx, pkgs, opts):
-    global worker_data
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-
-    unbuilt_pkg_names=[]
-    building_pkg_names=[]
-    unprioritized_pkg_names=[]
-
-    for pkg in pkgs:
-        name = pkg_to_name[pkg]
-        unbuilt_pkg_names.append(name)
-        unprioritized_pkg_names.append(name)
-
-    prioritized_pkg_names=[]
-
-    for wd in worker_data:
-        pkg = wd['pkg']
-        if not pkg is None:
-            name = pkg_to_name[pkg]
-            building_pkg_names.append(name)
-
-    # log(opts.logfile, "schedule: build_idx=%d  start" % build_idx)
-    if len(big_pkg_names) or len(big_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in big_pkg_names or pkg in big_pkgs:
-                prioritized_pkg_names.append(name)
-                next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    if len(slow_pkg_names) or len(slow_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in slow_pkg_names or pkg in slow_pkgs:
-                if can_build_at_idx(build_idx, name, opts):
-                    prioritized_pkg_names.append(name)
-                    next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    for name in unprioritized_pkg_names:
-        if can_build_at_idx(build_idx, name, opts):
-            prioritized_pkg_names.append(name)
-
-    name_out = schedule2(build_idx, prioritized_pkg_names, unbuilt_pkg_names, building_pkg_names, opts)
-    if not name_out is None:
-        pkg_out = name_to_pkg[name_out]
-    else:
-        pkg_out = None
-        # log(opts.logfile, "schedule: failed to translate '%s' to a pkg" % name_out)
-    # log(opts.logfile, "schedule: build_idx=%d  end: out = %s -> %s" % (build_idx, str(name_out), str(pkg_out)))
-    return pkg_out
-
-
-def schedule2(build_idx, pkg_names, unbuilt_pkg_names, building_pkg_names, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global no_dep_list
-
-    max_depth = 3
-
-    if len(pkg_names) == 0:
-        return None
-
-    unbuilt_deps={}
-    building_deps={}
-    for depth in range(max_depth,-1,-1):
-        unbuilt_deps[depth]={}
-        building_deps[depth]={}
-
-    for depth in range(max_depth,-1,-1):
-        checked=[]
-        reordered_pkg_names = pkg_names[:]
-        # for name in reordered_pkg_names:
-        while len(reordered_pkg_names):
-            name = reordered_pkg_names.pop(0)
-            if name in checked:
-                continue
-
-            # log(opts.logfile, "checked.append(%s)" % name)
-            checked.append(name)
-
-            pkg = name_to_pkg[name]
-            # log(opts.logfile, "schedule2: check '%s', depth %d" % (name, depth))
-            if not name in unbuilt_deps[depth]:
-                unbuilt_deps[depth][name] = unbuilt_dep_list(name, unbuilt_pkg_names, depth)
-            if not name in building_deps[depth]:
-                building_deps[depth][name] = unbuilt_dep_list(name, building_pkg_names, depth)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, depth, unbuilt_deps[depth][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, depth, building_deps[depth][name]))
-            if len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 0:
-                if can_build_at_idx(build_idx, name, opts):
-                    log(opts.logfile, "schedule2: no unbuilt deps for '%s', searching at depth %d" % (name, depth))
-                    return name
-                else:
-                    # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                    continue
-
-            if not name in unbuilt_deps[0]:
-                unbuilt_deps[0][name] = unbuilt_dep_list(name, unbuilt_pkg_names, 0)
-            if not name in building_deps[0]:
-                building_deps[0][name] = unbuilt_dep_list(name, building_pkg_names, 0)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, 0, unbuilt_deps[0][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, 0, building_deps[0][name]))
-            if (len(building_deps[depth][name]) == 0 and len(unbuilt_deps[depth][name]) == 1 and unbuilt_deps[depth][name][0] in no_dep_list) or (len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 1 and building_deps[depth][name][0] in no_dep_list):
-                if len(unbuilt_deps[0][name]) == 0 and len(building_deps[0][name]) == 0:
-                    if can_build_at_idx(build_idx, name, opts):
-                        log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, searching at depth %d" % (name, depth))
-                        return name
-                    else:
-                        # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                        continue
-
-            loop = False
-            for dep_name in unbuilt_deps[depth][name]:
-                if name == dep_name:
-                    continue
-
-                # log(opts.logfile, "name=%s depends on dep_name=%s, depth=%d" % (name, dep_name, depth))
-                if dep_name in checked:
-                    continue
-
-                # log(opts.logfile, "schedule2: check '%s' indirect" % dep_name)
-                if not dep_name in unbuilt_deps[depth]:
-                    unbuilt_deps[depth][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, depth)
-                if not dep_name in building_deps[depth]:
-                    building_deps[depth][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, depth)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, unbuilt_deps[depth][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, building_deps[depth][dep_name]))
-                if len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 0:
-                    if can_build_at_idx(build_idx, dep_name, opts):
-                        log(opts.logfile, "schedule2: deps: no unbuilt deps for '%s', working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                        return dep_name
-
-                if not dep_name in unbuilt_deps[0]:
-                    unbuilt_deps[0][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, 0)
-                if not dep_name in building_deps[0]:
-                    building_deps[0][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, 0)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, unbuilt_deps[0][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, building_deps[0][dep_name]))
-                if (len(building_deps[depth][dep_name]) == 0 and len(unbuilt_deps[depth][dep_name]) == 1 and unbuilt_deps[depth][dep_name][0] in no_dep_list) or (len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 1 and building_deps[depth][dep_name][0] in no_dep_list):
-                    if len(unbuilt_deps[0][dep_name]) == 0 and len(building_deps[0][dep_name]) == 0:
-                        if can_build_at_idx(build_idx, dep_name, opts):
-                            log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                            return dep_name
-
-                if name in unbuilt_deps[0][dep_name]:
-                    loop = True
-                    # log(opts.logfile, "schedule2: loop detected: %s <-> %s" % (name, dep_name))
-
-            if loop and len(building_deps[depth][name]) == 0:
-                log(opts.logfile, "schedule2: loop detected, try to build '%s'" % name)
-                return name
-
-            for dep_name in unbuilt_deps[depth][name]:
-                if dep_name in reordered_pkg_names:
-                    # log(opts.logfile, "schedule2: promote %s to work toward %s" % (dep_name, name))
-                    reordered_pkg_names.remove(dep_name)
-                    reordered_pkg_names.insert(0,dep_name)
-
-    # log(opts.logfile, "schedule2: Nothing buildable at this time")
-    return None
-
-
-def read_deps(opts):
-    read_srpm_deps(opts)
-    read_rpm_deps(opts)
-    read_map_deps(opts)
-
-def read_srpm_deps(opts):
-    global srpm_dependencies_direct
-
-    if opts.srpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.srpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.srpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.srpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            srpm_dependencies_direct[name]=deps.split(',')
-
-def read_rpm_deps(opts):
-    global rpm_dependencies_direct
-
-    if opts.rpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.rpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            rpm_dependencies_direct[name]=deps.split(',')
-
-def read_map_deps(opts):
-    global rpm_to_srpm_map
-
-    if opts.rpm_to_srpm_map_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_to_srpm_map_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_to_srpm_map_file)
-        sys.exit(1)
-
-    with open(opts.rpm_to_srpm_map_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (rpm,srpm) = line.rstrip().split(';')
-            rpm_to_srpm_map[rpm]=srpm
-
-
-def reaper(opts):
-    global built_pkgs
-    global failed
-    global worker_data
-    global workers
-
-    reaped = 0
-    need_createrepo = False
-    last_reaped = -1
-    while reaped > last_reaped:
-        last_reaped = reaped
-        for wd in worker_data:
-            p = wd['proc']
-            ret = p.exitcode
-            if ret is not None:
-                pkg = wd['pkg']
-                b = int(wd['build_index'])
-                p.join()
-                worker_data.remove(wd)
-                workers = workers - 1
-                reaped = reaped + 1
-                release_build_env(b)
-
-                log(opts.logfile, "End build on 'b%d': %s" % (b, pkg))
-
-                if ret == 0:
-                    failed.append(pkg)
-                    log(opts.logfile, "Error building %s on 'b%d'." % (os.path.basename(pkg), b))
-                    if opts.recurse and not stop_signal:
-                        log(opts.logfile, "Will try to build again (if some other package will succeed).")
-                    else:
-                        log(opts.logfile, "See logs/results in %s" % opts.local_repo_dir)
-                elif ret == 1:
-                    log(opts.logfile, "Success building %s on 'b%d'" % (os.path.basename(pkg), b))
-                    built_pkgs.append(pkg)
-                    need_createrepo = True
-                elif ret == 2:
-                    log(opts.logfile, "Skipping already built pkg %s" % os.path.basename(pkg))
-
-    if need_createrepo:
-        # createrepo with the new pkgs
-        err = createrepo(opts.local_repo_dir)[1]
-        if err.strip():
-            log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-            log(opts.logfile, "Err: %s" % err)
-
-    return reaped
-
-stop_signal = False
-
-def on_terminate(proc):
-    print("process {} terminated with exit code {}".format(proc, proc.returncode))
-
-def kill_proc_and_descentents(parent, need_stop=False, verbose=False):
-    global g_opts
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Stop %d" % parent.pid)
-
-        try:
-            parent.send_signal(signal.SIGSTOP)
-        except:
-            # perhaps mock still running as root, give it a sec to drop pivledges and try again
-            time.sleep(1)
-            parent.send_signal(signal.SIGSTOP)
-
-    try:
-        children = parent.children(recursive=False)
-    except:
-        children = []
-
-    for p in children:
-        kill_proc_and_descentents(p, need_stop=True, verbose=verbose)
-
-    if verbose:
-        log(g_opts.logfile, "Terminate %d" % parent.pid)
-
-    # parent.send_signal(signal.SIGTERM)
-    try:
-        parent.terminate()
-    except:
-        # perhaps mock still running as root, give it a sec to drop pivledges and try again
-        time.sleep(1)
-        parent.terminate()
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Continue %d" % parent.pid)
-
-        parent.send_signal(signal.SIGCONT)
-
-
-def child_signal_handler(signum, frame):
-    global g_opts
-    my_pid = os.getpid()
-    # log(g_opts.logfile, "--------- child %d recieved signal %d" % (my_pid, signum))
-    p = psutil.Process(my_pid)
-    kill_proc_and_descentents(p)
-    try:
-        sys.exit(0)
-    except SystemExit as e:
-        os._exit(0)
-
-def signal_handler(signum, frame):
-    global g_opts
-    global stop_signal
-    global workers
-    global worker_data
-    stop_signal = True
-
-    # Signal processes to complete
-    log(g_opts.logfile, "recieved signal %d, Terminating children" % signum)
-    for wd in worker_data:
-        p = wd['proc']
-        ret = p.exitcode
-        if ret is None:
-            # log(g_opts.logfile, "terminate child %d" % p.pid)
-            p.terminate()
-        else:
-            log(g_opts.logfile, "child return code was %d" % ret)
-
-    # Wait for remaining processes to complete
-    log(g_opts.logfile, "===== wait for signaled jobs to complete =====")
-    while len(worker_data) > 0:
-        log(g_opts.logfile, "    remaining workers: %d" % workers)
-        reaped = reaper(g_opts)
-        if reaped == 0:
-            time.sleep(0.1)
-
-    try:
-        sys.exit(1)
-    except SystemExit as e:
-        os._exit(1)
-
-def main(args):
-    opts, args = parse_args(args)
-    # take mock config + list of pkgs
-
-    global g_opts
-    global stop_signal
-    global build_env
-    global worker_data
-    global workers
-    global max_workers
-
-    global slow_pkg_names
-    global slow_pkgs
-    global big_pkg_names
-    global big_pkgs
-    max_workers = int(opts.max_workers)
-
-    global failed
-    global built_pkgs
-
-    cfg = opts.chroot
-    pkgs = args[1:]
-
-    # transform slow/big package options into dictionaries
-    for line in opts.slow_pkg_names_raw:
-        speed,name = line.split(":")
-        if speed != "":
-            slow_pkg_names[name]=int(speed)
-    for line in opts.slow_pkgs_raw:
-        speed,pkg = line.split(":")
-        if speed != "":
-            slow_pkgs[pkg]=int(speed)
-    for line in opts.big_pkg_names_raw:
-        size_gb,name = line.split(":")
-        if size_gb != "":
-            big_pkg_names[name]=int(size_gb)
-    for line in opts.big_pkgs_raw:
-        size_gb,pkg = line.split(":")
-        if size_gb != "":
-            big_pkgs[pkg]=int(size_gb)
-
-    # Set up a mapping between pkg path and pkg name
-    global pkg_to_name
-    global name_to_pkg
-    for pkg in pkgs:
-        if not pkg.endswith('.rpm'):
-            log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-            continue
-
-        try:
-            name = rpmName(pkg)
-        except OSError as e:
-            print("Could not parse rpm %s" % pkg)
-            sys.exit(1)
-
-        pkg_to_name[pkg] = name
-        name_to_pkg[name] = pkg
-
-    read_deps(opts)
-
-    global config_opts
-    config_opts = mockbuild.util.load_config(mockconfig_path, cfg, None, __VERSION__, PKGPYTHONDIR)
-
-    if not opts.tmp_prefix:
-        try:
-            opts.tmp_prefix = os.getlogin()
-        except OSError as e:
-            print("Could not find login name for tmp dir prefix add --tmp_prefix")
-            sys.exit(1)
-    pid = os.getpid()
-    opts.uniqueext = '%s-%s' % (opts.tmp_prefix, pid)
-
-    if opts.basedir != "/var/lib/mock":
-        opts.uniqueext = ''
-
-    # create a tempdir for our local info
-    if opts.localrepo:
-        local_tmp_dir = os.path.abspath(opts.localrepo)
-        if not os.path.exists(local_tmp_dir):
-            os.makedirs(local_tmp_dir)
-            os.chmod(local_tmp_dir, 0o755)
-    else:
-        pre = 'mock-chain-%s-' % opts.uniqueext
-        local_tmp_dir = tempfile.mkdtemp(prefix=pre, dir='/var/tmp')
-        os.chmod(local_tmp_dir, 0o755)
-
-    if opts.logfile:
-        opts.logfile = os.path.join(local_tmp_dir, opts.logfile)
-        if os.path.exists(opts.logfile):
-            os.unlink(opts.logfile)
-
-    log(opts.logfile, "starting logfile: %s" % opts.logfile)
-
-    opts.local_repo_dir = os.path.normpath(local_tmp_dir + '/results/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.local_repo_dir):
-        os.makedirs(opts.local_repo_dir, mode=0o755)
-
-    local_baseurl = "file://%s" % opts.local_repo_dir
-    log(opts.logfile, "results dir: %s" % opts.local_repo_dir)
-    opts.config_path = os.path.normpath(local_tmp_dir + '/configs/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.config_path):
-        os.makedirs(opts.config_path, mode=0o755)
-
-    log(opts.logfile, "config dir: %s" % opts.config_path)
-
-    my_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(config_opts['chroot_name']))
-
-    # modify with localrepo
-    res, msg = add_local_repo(config_opts['config_file'], my_mock_config, local_baseurl, 'local_build_repo')
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    for baseurl in opts.repos:
-        res, msg = add_local_repo(my_mock_config, my_mock_config, baseurl)
-        if not res:
-            log(opts.logfile, "Error: Could not add: %s to yum config in mock chroot: %s" % (baseurl, msg))
-            sys.exit(1)
-
-    res, msg = set_basedir(my_mock_config, my_mock_config, opts.basedir, opts)
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    # these files needed from the mock.config dir to make mock run
-    for fn in ['site-defaults.cfg', 'logging.ini']:
-        pth = mockconfig_path + '/' + fn
-        shutil.copyfile(pth, opts.config_path + '/' + fn)
-
-    # createrepo on it
-    err = createrepo(opts.local_repo_dir)[1]
-    if err.strip():
-        log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-        log(opts.logfile, "Err: %s" % err)
-        sys.exit(1)
-
-    init_build_env(max_workers, opts, config_opts)
-
-    download_dir = tempfile.mkdtemp()
-    downloaded_pkgs = {}
-    built_pkgs = []
-    try_again = True
-    to_be_built = pkgs
-    return_code = 0
-    num_of_tries = 0
-
-    g_opts = opts
-    signal.signal(signal.SIGTERM, signal_handler)
-    signal.signal(signal.SIGINT, signal_handler)
-    signal.signal(signal.SIGHUP, signal_handler)
-    signal.signal(signal.SIGABRT, signal_handler)
-
-    while try_again and not stop_signal:
-        num_of_tries += 1
-        failed = []
-
-        log(opts.logfile, "===== iteration %d start =====" % num_of_tries)
-
-        to_be_built_scheduled = to_be_built[:]
-
-        need_reap = False
-        while len(to_be_built_scheduled) > 0:
-            # Free up a worker
-            while need_reap or workers >= max_workers:
-                need_reap = False
-                reaped = reaper(opts)
-                if reaped == 0:
-                    time.sleep(0.1)
-
-            if workers < max_workers:
-                workers = workers + 1
-
-                b = get_idle_build_env(max_workers)
-                if b < 0:
-                    log(opts.logfile, "Failed to find idle build env for: %s" % pkg)
-                    workers = workers - 1
-                    need_reap = True
-                    continue
-
-                pkg = schedule(b, to_be_built_scheduled, opts)
-                if pkg is None:
-                    if workers <= 1:
-                        # Remember we have one build environmnet reserved, so can't test for zero workers
-                        log(opts.logfile, "failed to schedule from: %s" % to_be_built_scheduled)
-                        pkg = to_be_built_scheduled[0]
-                        log(opts.logfile, "All workers idle, forcing build of pkg=%s" % pkg)
-                    else:
-                        release_build_env(b)
-                        workers = workers - 1
-                        need_reap = True
-                        continue
-
-                to_be_built_scheduled.remove(pkg)
-
-                if not pkg.endswith('.rpm'):
-                    log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-                    failed.append(pkg)
-                    release_build_env(b)
-                    need_reap = True
-                    continue
-
-                elif pkg.startswith('http://') or pkg.startswith('https://') or pkg.startswith('ftp://'):
-                    url = pkg
-                    try:
-                        log(opts.logfile, 'Fetching %s' % url)
-                        r = requests.get(url)
-                        # pylint: disable=no-member
-                        if r.status_code == requests.codes.ok:
-                            fn = urlsplit(r.url).path.rsplit('/', 1)[1]
-                            if 'content-disposition' in r.headers:
-                                _, params = cgi.parse_header(r.headers['content-disposition'])
-                                if 'filename' in params and params['filename']:
-                                    fn = params['filename']
-                            pkg = download_dir + '/' + fn
-                            with open(pkg, 'wb') as fd:
-                                for chunk in r.iter_content(4096):
-                                    fd.write(chunk)
-                    except Exception as e:
-                        log(opts.logfile, 'Error Downloading %s: %s' % (url, str(e)))
-                        failed.append(url)
-                        release_build_env(b)
-                        need_reap = True
-                        continue
-                    else:
-                        downloaded_pkgs[pkg] = url
-
-                log(opts.logfile, "Start build on 'b%d': %s" % (b, pkg))
-                # ret = do_build(opts, config_opts['chroot_name'], pkg)[0]
-                p = multiprocessing.Process(target=do_build, args=(opts, build_env[b]['cfg'], pkg))
-                worker_data.append({'proc': p, 'pkg': pkg, 'build_index': int(b)})
-                p.start()
-
-        # Wait for remaining processes to complete
-        log(opts.logfile, "===== wait for last jobs in iteration %d to complete =====" % num_of_tries)
-        while workers > 0:
-            reaped = reaper(opts)
-            if reaped == 0:
-                time.sleep(0.1)
-        log(opts.logfile, "===== iteration %d complete =====" % num_of_tries)
-
-        if failed and opts.recurse:
-            log(opts.logfile, "failed=%s" % failed)
-            log(opts.logfile, "to_be_built=%s" % to_be_built)
-            if len(failed) != len(to_be_built):
-                to_be_built = failed
-                try_again = True
-                log(opts.logfile, 'Some package succeeded, some failed.')
-                log(opts.logfile, 'Trying to rebuild %s failed pkgs, because --recurse is set.' % len(failed))
-            else:
-                if max_workers > 1:
-                    max_workers = 1
-                    to_be_built = failed
-                    try_again = True
-                    log(opts.logfile, 'Some package failed under parallel build.')
-                    log(opts.logfile, 'Trying to rebuild %s failed pkgs with single thread, because --recurse is set.' % len(failed))
-                else:
-                    log(opts.logfile, "")
-                    log(opts.logfile, "*** Build Failed ***")
-                    log(opts.logfile, "Tried %s times - following pkgs could not be successfully built:" % num_of_tries)
-                    log(opts.logfile, "*** Build Failed ***")
-                    for pkg in failed:
-                        msg = pkg
-                        if pkg in downloaded_pkgs:
-                            msg = downloaded_pkgs[pkg]
-                        log(opts.logfile, msg)
-                    log(opts.logfile, "")
-                    try_again = False
-        else:
-            try_again = False
-            if failed:
-                return_code = 2
-
-    # cleaning up our download dir
-    shutil.rmtree(download_dir, ignore_errors=True)
-
-    log(opts.logfile, "")
-    log(opts.logfile, "Results out to: %s" % opts.local_repo_dir)
-    log(opts.logfile, "")
-    log(opts.logfile, "Pkgs built: %s" % len(built_pkgs))
-    if built_pkgs:
-        if failed:
-            if len(built_pkgs):
-                log(opts.logfile, "Some packages successfully built in this order:")
-        else:
-            log(opts.logfile, "Packages successfully built in this order:")
-        for pkg in built_pkgs:
-            log(opts.logfile, pkg)
-    return return_code
-
-
-if __name__ == "__main__":
-    sys.exit(main(sys.argv))
diff --git a/build-tools/mockchain-parallel-2.7 b/build-tools/mockchain-parallel-2.7
deleted file mode 100755
index b9f4bd3f..00000000
--- a/build-tools/mockchain-parallel-2.7
+++ /dev/null
@@ -1,1221 +0,0 @@
-#!/usr/bin/python3 -tt
-# -*- coding: utf-8 -*-
-# vim: noai:ts=4:sw=4:expandtab
-
-# by skvidal@fedoraproject.org
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Library General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301  USA.
-# copyright 2012 Red Hat, Inc.
-
-# SUMMARY
-# mockchain
-# take a mock config and a series of srpms
-# rebuild them one at a time
-# adding each to a local repo
-# so they are available as build deps to next pkg being built
-from __future__ import print_function
-
-import cgi
-# pylint: disable=deprecated-module
-import optparse
-import os
-import re
-import shutil
-import subprocess
-import sys
-import tempfile
-import time
-import multiprocessing
-import signal
-import psutil
-
-import requests
-# pylint: disable=import-error
-from six.moves.urllib_parse import urlsplit
-
-import mockbuild.config as mock_config
-
-from stxRpmUtils import splitRpmFilename
-
-# all of the variables below are substituted by the build system
-__VERSION__="2.7"
-SYSCONFDIR="/etc"
-PYTHONDIR="/usr/lib/python3.6/site-packages"
-PKGPYTHONDIR="/usr/lib/python3.6/site-packages/mockbuild"
-MOCKCONFDIR = os.path.join(SYSCONFDIR, "mock")
-# end build system subs
-
-mockconfig_path = '/etc/mock'
-
-def rpmName(path):
-    filename = os.path.basename(path)
-    (n, v, r, e, a) = splitRpmFilename(filename)
-    return n
-
-def createrepo(path):
-    global max_workers
-    if os.path.exists(path + '/repodata/repomd.xml'):
-        comm = ['/usr/bin/createrepo_c', '--update', '--retain-old-md', "%d" % max_workers, "--workers", "%d" % max_workers, path]
-    else:
-        comm = ['/usr/bin/createrepo_c', "--workers", "%d" % max_workers, path]
-    cmd = subprocess.Popen(
-        comm, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    return out, err
-
-
-g_opts = optparse.Values()
-
-def parse_args(args):
-    parser = optparse.OptionParser('\nmockchain -r mockcfg pkg1 [pkg2] [pkg3]')
-    parser.add_option(
-        '-r', '--root', default=None, dest='chroot',
-        metavar="CONFIG",
-        help="chroot config name/base to use in the mock build")
-    parser.add_option(
-        '-l', '--localrepo', default=None,
-        help="local path for the local repo, defaults to making its own")
-    parser.add_option(
-        '-c', '--continue', default=False, action='store_true',
-        dest='cont',
-        help="if a pkg fails to build, continue to the next one")
-    parser.add_option(
-        '-a', '--addrepo', default=[], action='append',
-        dest='repos',
-        help="add these repo baseurls to the chroot's yum config")
-    parser.add_option(
-        '--recurse', default=False, action='store_true',
-        help="if more than one pkg and it fails to build, try to build the rest and come back to it")
-    parser.add_option(
-        '--log', default=None, dest='logfile',
-        help="log to the file named by this option, defaults to not logging")
-    parser.add_option(
-        '--workers', default=1, dest='max_workers',
-        help="number of parallel build jobs")
-    parser.add_option(
-        '--worker-resources', default="", dest='worker_resources',
-        help="colon seperated list, how much mem in gb for each workers temfs")
-    parser.add_option(
-        '--basedir', default='/var/lib/mock', dest='basedir',
-        help="path to workspace")
-    parser.add_option(
-        '--tmp_prefix', default=None, dest='tmp_prefix',
-        help="tmp dir prefix - will default to username-pid if not specified")
-    parser.add_option(
-        '-m', '--mock-option', default=[], action='append',
-        dest='mock_option',
-        help="option to pass directly to mock")
-    parser.add_option(
-        '--mark-slow-name', default=[], action='append',
-        dest='slow_pkg_names_raw',
-        help="package name that is known to build slowly")
-    parser.add_option(
-        '--mark-slow-path', default=[], action='append',
-        dest='slow_pkgs_raw',
-        help="package path that is known to build slowly")
-    parser.add_option(
-        '--mark-big-name', default=[], action='append',
-        dest='big_pkg_names_raw',
-        help="package name that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--mark-big-path', default=[], action='append',
-        dest='big_pkgs_raw',
-        help="package path that is known to require a lot of disk space to build")
-    parser.add_option(
-        '--srpm-dependency-file', default=None,
-        dest='srpm_dependency_file',
-        help="path to srpm dependency file")
-    parser.add_option(
-        '--rpm-dependency-file', default=None,
-        dest='rpm_dependency_file',
-        help="path to rpm dependency file")
-    parser.add_option(
-        '--rpm-to-srpm-map-file', default=None,
-        dest='rpm_to_srpm_map_file',
-        help="path to rpm to srpm map file")
-
-    opts, args = parser.parse_args(args)
-    if opts.recurse:
-        opts.cont = True
-
-    if not opts.chroot:
-        print("You must provide an argument to -r for the mock chroot")
-        sys.exit(1)
-
-    if len(sys.argv) < 3:
-        print("You must specify at least 1 package to build")
-        sys.exit(1)
-
-    return opts, args
-
-
-REPOS_ID = []
-
-slow_pkg_names={}
-slow_pkgs={}
-big_pkg_names={}
-big_pkgs={}
-
-def generate_repo_id(baseurl):
-    """ generate repository id for yum.conf out of baseurl """
-    repoid = "/".join(baseurl.split('//')[1:]).replace('/', '_')
-    repoid = re.sub(r'[^a-zA-Z0-9_]', '', repoid)
-    suffix = ''
-    i = 1
-    while repoid + suffix in REPOS_ID:
-        suffix = str(i)
-        i += 1
-    repoid = repoid + suffix
-    REPOS_ID.append(repoid)
-    return repoid
-
-
-def set_build_idx(infile, destfile, build_idx, tmpfs_size_gb, opts):
-    # log(opts.logfile, "set_build_idx: infile=%s, destfile=%s, build_idx=%d, tmpfs_size_gb=%d" % (infile, destfile, build_idx, tmpfs_size_gb))
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['root'] = config_opts['root'].replace('b0', 'b{0}'.format(build_idx))
-        config_opts['rootdir'] = config_opts['rootdir'].replace('b0', 'b{0}'.format(build_idx))
-        config_opts['cache_topdir'] = config_opts['cache_topdir'].replace('b0', 'b{0}'.format(build_idx))
-        # log(opts.logfile, "set_build_idx: root=%s" % config_opts['root'])
-        # log(opts.logfile, "set_build_idx: cache_topdir=%s" % config_opts['cache_topdir'])
-        if tmpfs_size_gb > 0:
-            config_opts['plugin_conf']['tmpfs_enable'] = True
-            config_opts['plugin_conf']['tmpfs_opts'] = {}
-            config_opts['plugin_conf']['tmpfs_opts']['required_ram_mb'] = 1024
-            config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'] = "%dg" % tmpfs_size_gb
-            config_opts['plugin_conf']['tmpfs_opts']['mode'] = '0755'
-            config_opts['plugin_conf']['tmpfs_opts']['keep_mounted'] = True
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_enable=%s" % config_opts['plugin_conf']['tmpfs_enable'])
-            # log(opts.logfile, "set_build_idx: plugin_conf->tmpfs_opts->max_fs_size=%s" % config_opts['plugin_conf']['tmpfs_opts']['max_fs_size'])
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % config_opts['cache_topdir'])
-            if not os.path.isdir(config_opts['cache_topdir']):
-                os.makedirs(config_opts['cache_topdir'], exist_ok=True)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % config_opts['cache_topdir']
-
-        cache_dir = "%s/%s/mock" % (config_opts['basedir'], config_opts['root'])
-        try:
-            log(opts.logfile, "set_build_idx: os.makedirs %s" % cache_dir)
-            if not os.path.isdir(cache_dir):
-                os.makedirs(cache_dir)
-        except (IOError, OSError):
-            return False, "Could not create dir: %s" % cache_dir
-
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def set_basedir(infile, destfile, basedir, opts):
-    log(opts.logfile, "set_basedir: infile=%s, destfile=%s, basedir=%s" % (infile, destfile, basedir))
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-
-        config_opts['basedir'] = basedir
-        config_opts['resultdir'] = '{0}/result'.format(basedir)
-        config_opts['backup_base_dir'] = '{0}/backup'.format(basedir)
-        config_opts['root'] = 'mock/b0'
-        config_opts['cache_topdir'] = '{0}/cache/b0'.format(basedir)
-        config_opts['rootdir'] = '{0}/mock/b0/root'.format(basedir)
-
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-def add_local_repo(infile, destfile, baseurl, repoid=None):
-    """take a mock chroot config and add a repo to it's yum.conf
-       infile = mock chroot config file
-       destfile = where to save out the result
-       baseurl = baseurl of repo you wish to add"""
-    global config_opts
-
-    try:
-        with open(infile) as f:
-            code = compile(f.read(), infile, 'exec')
-        # pylint: disable=exec-used
-        exec(code)
-        if not repoid:
-            repoid = generate_repo_id(baseurl)
-        else:
-            REPOS_ID.append(repoid)
-        localyumrepo = """
-[%s]
-name=%s
-baseurl=%s
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-cost=1
-best=1
-""" % (repoid, baseurl, baseurl)
-
-        config_opts['yum.conf'] += localyumrepo
-        with open(destfile, 'w') as br_dest:
-            for k, v in list(config_opts.items()):
-                br_dest.write("config_opts[%r] = %r\n" % (k, v))
-        return True, ''
-    except (IOError, OSError):
-        return False, "Could not write mock config to %s" % destfile
-
-    return True, ''
-
-
-def do_build(opts, cfg, pkg):
-
-    # returns 0, cmd, out, err = failure
-    # returns 1, cmd, out, err  = success
-    # returns 2, None, None, None = already built
-
-    signal.signal(signal.SIGTERM, child_signal_handler)
-    signal.signal(signal.SIGINT, child_signal_handler)
-    signal.signal(signal.SIGHUP, child_signal_handler)
-    signal.signal(signal.SIGABRT, child_signal_handler)
-    s_pkg = os.path.basename(pkg)
-    pdn = s_pkg.replace('.src.rpm', '')
-    resdir = '%s/%s' % (opts.local_repo_dir, pdn)
-    resdir = os.path.normpath(resdir)
-    if not os.path.exists(resdir):
-        os.makedirs(resdir)
-
-    success_file = resdir + '/success'
-    fail_file = resdir + '/fail'
-
-    if os.path.exists(success_file):
-        # return 2, None, None, None
-        sys.exit(2)
-
-    # clean it up if we're starting over :)
-    if os.path.exists(fail_file):
-        os.unlink(fail_file)
-
-    if opts.uniqueext == '':
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '--root', cfg, ]
-    else:
-        mockcmd = ['/usr/bin/mock',
-                   '--configdir', opts.config_path,
-                   '--resultdir', resdir,
-                   '--uniqueext', opts.uniqueext,
-                   '--root', cfg, ]
-
-    # Ensure repo is up-to-date.
-    # Note: Merely adding --update to mockcmd failed to update
-    mockcmd_update=mockcmd
-    mockcmd_update.append('--update')
-    cmd = subprocess.Popen(
-        mockcmd_update, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode != 0:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-
-    # heuristic here, if user pass for mock "-d foo", but we must be care to leave
-    # "-d'foo bar'" or "--define='foo bar'" as is
-    compiled_re_1 = re.compile(r'^(-\S)\s+(.+)')
-    compiled_re_2 = re.compile(r'^(--[^ =])[ =](\.+)')
-    for option in opts.mock_option:
-        r_match = compiled_re_1.match(option)
-        if r_match:
-            mockcmd.extend([r_match.group(1), r_match.group(2)])
-        else:
-            r_match = compiled_re_2.match(option)
-            if r_match:
-                mockcmd.extend([r_match.group(1), r_match.group(2)])
-            else:
-                mockcmd.append(option)
-
-    print('building %s' % s_pkg)
-    mockcmd.append(pkg)
-    cmd = subprocess.Popen(
-        mockcmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, err = cmd.communicate()
-    if cmd.returncode == 0:
-        with open(success_file, 'w') as f:
-            f.write('done\n')
-        ret = 1
-    else:
-        if (isinstance(err, bytes)):
-            err = err.decode("utf-8")
-        sys.stderr.write(err)
-        with open(fail_file, 'w') as f:
-            f.write('undone\n')
-        ret = 0
-
-    # return ret, cmd, out, err
-    sys.exit(ret)
-
-
-def log(lf, msg):
-    if lf:
-        now = time.time()
-        try:
-            with open(lf, 'a') as f:
-                f.write(str(now) + ':' + msg + '\n')
-        except (IOError, OSError) as e:
-            print('Could not write to logfile %s - %s' % (lf, str(e)))
-    print(msg)
-
-
-config_opts = {}
-
-worker_data = []
-workers = 0
-max_workers = 1
-
-build_env = []
-
-failed = []
-built_pkgs = []
-
-local_repo_dir = ""
-
-pkg_to_name={}
-name_to_pkg={}
-srpm_dependencies_direct={}
-rpm_dependencies_direct={}
-rpm_to_srpm_map={}
-no_dep_list = [ "bash", "kernel" , "kernel-rt" ]
-
-
-def init_build_env(slots, opts, config_opts_in):
-    global build_env
-
-    orig_chroot_name=config_opts_in['chroot_name']
-    orig_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(orig_chroot_name))
-    # build_env.append({'state': 'Idle', 'cfg': orig_mock_config})
-    for i in range(0,slots):
-        new_chroot_name = "{0}.b{1}".format(orig_chroot_name, i)
-        new_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(new_chroot_name))
-        tmpfs_size_gb = 0
-        if opts.worker_resources == "":
-            if i > 0:
-                tmpfs_size_gb = 2 * (1 + slots - i)
-        else:
-            resource_array=opts.worker_resources.split(':')
-            if i < len(resource_array):
-                tmpfs_size_gb=int(resource_array[i])
-            else:
-                log(opts.logfile, "Error: worker-resources argument '%s' does not supply info for all %d workers" % (opts.worker_resources, slots))
-                sys.exit(1)
-        if i == 0 and tmpfs_size_gb != 0:
-            log(opts.logfile, "Error: worker-resources argument '%s' must pass '0' as first value" % (opts.worker_resources, slots))
-            sys.exit(1)
-        build_env.append({'state': 'Idle', 'cfg': new_mock_config, 'fs_size_gb': tmpfs_size_gb})
-
-        res, msg = set_build_idx(orig_mock_config, new_mock_config, i, tmpfs_size_gb, opts)
-        if not res:
-            log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-            sys.exit(1)
-
-
-idle_build_env_last_awarded = 0
-def get_idle_build_env(slots):
-    global build_env
-    global idle_build_env_last_awarded
-    visited = 0
-
-    if slots < 1:
-        return -1
-
-    i = idle_build_env_last_awarded - 1
-    if i < 0 or i >= slots:
-        i = slots - 1
-
-    while visited < slots:
-        if build_env[i]['state'] == 'Idle':
-            build_env[i]['state'] = 'Busy'
-            idle_build_env_last_awarded = i
-            return i
-        visited = visited + 1
-        i = i - 1
-        if i < 0:
-            i = slots - 1
-    return -1
-
-def release_build_env(idx):
-    global build_env
-
-    build_env[idx]['state'] = 'Idle'
-
-def get_best_rc(a, b):
-    print("get_best_rc: a=%s" % str(a))
-    print("get_best_rc: b=%s" % str(b))
-    if (b == {}) and (a != {}):
-        return a
-    if (a == {}) and (b != {}):
-        return b
-
-    if (b['build_name'] is None) and (not a['build_name'] is None):
-        return a
-    if (a['build_name'] is None) and (not b['build_name'] is None):
-        return b
-
-    if a['unbuilt_deps'] < b['unbuilt_deps']:
-        return a
-    if b['unbuilt_deps'] < a['unbuilt_deps']:
-        return b
-
-    if a['depth'] < b['depth']:
-        return a
-    if b['depth'] < a['depth']:
-        return b
-
-    print("get_best_rc: uncertain %s vs %s" % (a,b))
-    return a
-
-unbuilt_dep_list_print=False
-def unbuilt_dep_list(name, unbuilt_pkg_names, depth, checked=None):
-    global srpm_dependencies_direct
-    global rpm_dependencies_direct
-    global rpm_to_srpm_map
-    global no_dep_list
-    global unbuilt_dep_list_print
-
-    first_iteration=False
-    unbuilt = []
-    if name in no_dep_list:
-        return unbuilt
-
-    if checked is None:
-        first_iteration=True
-        checked=[]
-
-    # Count unbuild dependencies
-    if first_iteration:
-        dependencies_direct=srpm_dependencies_direct
-    else:
-        dependencies_direct=rpm_dependencies_direct
-
-    if name in dependencies_direct:
-        for rdep in dependencies_direct[name]:
-            sdep='???'
-            if rdep in rpm_to_srpm_map:
-                sdep = rpm_to_srpm_map[rdep]
-            if rdep != name and sdep != name and not rdep in checked:
-                if (not first_iteration) and (sdep in no_dep_list):
-                    continue
-                checked.append(rdep)
-                if sdep in unbuilt_pkg_names:
-                    if not sdep in unbuilt:
-                        unbuilt.append(sdep)
-                if depth > 0:
-                    child_unbuilt = unbuilt_dep_list(rdep, unbuilt_pkg_names, depth-1, checked)
-                    for sub_sdep in child_unbuilt:
-                        if sub_sdep != name:
-                            if not sub_sdep in unbuilt:
-                                unbuilt.append(sub_sdep)
-
-    return unbuilt
-
-def can_build_at_idx(build_idx, name, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-    global build_env
-
-    fs_size_gb = 0
-    size_gb = 0
-    speed = 0
-    pkg = name_to_pkg[name]
-    if name in big_pkg_names:
-        size_gb=big_pkg_names[name]
-    if pkg in big_pkgs:
-        size_gb=big_pkgs[pkg]
-    if name in slow_pkg_names:
-        speed=slow_pkg_names[name]
-    if pkg in slow_pkgs:
-        speed=slow_pkgs[pkg]
-    fs_size_gb = build_env[build_idx]['fs_size_gb']
-    return fs_size_gb == 0 or fs_size_gb >= size_gb
-
-def schedule(build_idx, pkgs, opts):
-    global worker_data
-    global pkg_to_name
-    global name_to_pkg
-    global big_pkgs
-    global big_pkg_names
-    global slow_pkgs
-    global slow_pkg_names
-
-    unbuilt_pkg_names=[]
-    building_pkg_names=[]
-    unprioritized_pkg_names=[]
-
-    for pkg in pkgs:
-        name = pkg_to_name[pkg]
-        unbuilt_pkg_names.append(name)
-        unprioritized_pkg_names.append(name)
-
-    prioritized_pkg_names=[]
-
-    for wd in worker_data:
-        pkg = wd['pkg']
-        if not pkg is None:
-            name = pkg_to_name[pkg]
-            building_pkg_names.append(name)
-
-    # log(opts.logfile, "schedule: build_idx=%d  start" % build_idx)
-    if len(big_pkg_names) or len(big_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in big_pkg_names or pkg in big_pkgs:
-                prioritized_pkg_names.append(name)
-                next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    if len(slow_pkg_names) or len(slow_pkgs):
-        next_unprioritized_pkg_names = unprioritized_pkg_names[:]
-        for name in unprioritized_pkg_names:
-            pkg = name_to_pkg[name]
-            if name in slow_pkg_names or pkg in slow_pkgs:
-                if can_build_at_idx(build_idx, name, opts):
-                    prioritized_pkg_names.append(name)
-                    next_unprioritized_pkg_names.remove(name)
-        unprioritized_pkg_names = next_unprioritized_pkg_names[:]
-
-    for name in unprioritized_pkg_names:
-        if can_build_at_idx(build_idx, name, opts):
-            prioritized_pkg_names.append(name)
-
-    name_out = schedule2(build_idx, prioritized_pkg_names, unbuilt_pkg_names, building_pkg_names, opts)
-    if not name_out is None:
-        pkg_out = name_to_pkg[name_out]
-    else:
-        pkg_out = None
-        # log(opts.logfile, "schedule: failed to translate '%s' to a pkg" % name_out)
-    # log(opts.logfile, "schedule: build_idx=%d  end: out = %s -> %s" % (build_idx, str(name_out), str(pkg_out)))
-    return pkg_out
-
-
-def schedule2(build_idx, pkg_names, unbuilt_pkg_names, building_pkg_names, opts):
-    global pkg_to_name
-    global name_to_pkg
-    global no_dep_list
-
-    max_depth = 3
-
-    if len(pkg_names) == 0:
-        return None
-
-    unbuilt_deps={}
-    building_deps={}
-    for depth in range(max_depth,-1,-1):
-        unbuilt_deps[depth]={}
-        building_deps[depth]={}
-
-    for depth in range(max_depth,-1,-1):
-        checked=[]
-        reordered_pkg_names = pkg_names[:]
-        # for name in reordered_pkg_names:
-        while len(reordered_pkg_names):
-            name = reordered_pkg_names.pop(0)
-            if name in checked:
-                continue
-
-            # log(opts.logfile, "checked.append(%s)" % name)
-            checked.append(name)
-
-            pkg = name_to_pkg[name]
-            # log(opts.logfile, "schedule2: check '%s', depth %d" % (name, depth))
-            if not name in unbuilt_deps[depth]:
-                unbuilt_deps[depth][name] = unbuilt_dep_list(name, unbuilt_pkg_names, depth)
-            if not name in building_deps[depth]:
-                building_deps[depth][name] = unbuilt_dep_list(name, building_pkg_names, depth)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, depth, unbuilt_deps[depth][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, depth, building_deps[depth][name]))
-            if len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 0:
-                if can_build_at_idx(build_idx, name, opts):
-                    log(opts.logfile, "schedule2: no unbuilt deps for '%s', searching at depth %d" % (name, depth))
-                    return name
-                else:
-                    # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                    continue
-
-            if not name in unbuilt_deps[0]:
-                unbuilt_deps[0][name] = unbuilt_dep_list(name, unbuilt_pkg_names, 0)
-            if not name in building_deps[0]:
-                building_deps[0][name] = unbuilt_dep_list(name, building_pkg_names, 0)
-            # log(opts.logfile, "schedule2: unbuilt deps for pkg=%s, depth=%d: %s" % (name, 0, unbuilt_deps[0][name]))
-            # log(opts.logfile, "schedule2: building deps for pkg=%s, depth=%d: %s" % (name, 0, building_deps[0][name]))
-            if (len(building_deps[depth][name]) == 0 and len(unbuilt_deps[depth][name]) == 1 and unbuilt_deps[depth][name][0] in no_dep_list) or (len(unbuilt_deps[depth][name]) == 0 and len(building_deps[depth][name]) == 1 and building_deps[depth][name][0] in no_dep_list):
-                if len(unbuilt_deps[0][name]) == 0 and len(building_deps[0][name]) == 0:
-                    if can_build_at_idx(build_idx, name, opts):
-                        log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, searching at depth %d" % (name, depth))
-                        return name
-                    else:
-                        # log(opts.logfile, "schedule2: Can't build '%s' on 'b%d'" % (name, build_idx))
-                        continue
-
-            loop = False
-            for dep_name in unbuilt_deps[depth][name]:
-                if name == dep_name:
-                    continue
-
-                # log(opts.logfile, "name=%s depends on dep_name=%s, depth=%d" % (name, dep_name, depth))
-                if dep_name in checked:
-                    continue
-
-                # log(opts.logfile, "schedule2: check '%s' indirect" % dep_name)
-                if not dep_name in unbuilt_deps[depth]:
-                    unbuilt_deps[depth][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, depth)
-                if not dep_name in building_deps[depth]:
-                    building_deps[depth][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, depth)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, unbuilt_deps[depth][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, depth, building_deps[depth][dep_name]))
-                if len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 0:
-                    if can_build_at_idx(build_idx, dep_name, opts):
-                        log(opts.logfile, "schedule2: deps: no unbuilt deps for '%s', working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                        return dep_name
-
-                if not dep_name in unbuilt_deps[0]:
-                    unbuilt_deps[0][dep_name] = unbuilt_dep_list(dep_name, unbuilt_pkg_names, 0)
-                if not dep_name in building_deps[0]:
-                    building_deps[0][dep_name] = unbuilt_dep_list(dep_name, building_pkg_names, 0)
-                # log(opts.logfile, "schedule2: deps: unbuilt deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, unbuilt_deps[0][dep_name]))
-                # log(opts.logfile, "schedule2: deps: building deps for %s -> %s, depth=%d: %s" % (name, dep_name, 0, building_deps[0][dep_name]))
-                if (len(building_deps[depth][dep_name]) == 0 and len(unbuilt_deps[depth][dep_name]) == 1 and unbuilt_deps[depth][dep_name][0] in no_dep_list) or (len(unbuilt_deps[depth][dep_name]) == 0 and len(building_deps[depth][dep_name]) == 1 and building_deps[depth][dep_name][0] in no_dep_list):
-                    if len(unbuilt_deps[0][dep_name]) == 0 and len(building_deps[0][dep_name]) == 0:
-                        if can_build_at_idx(build_idx, dep_name, opts):
-                            log(opts.logfile, "schedule2: no unbuilt deps for '%s' except for indirect kernel dep, working towards '%s', searching at depth %d" % (dep_name, name, depth))
-                            return dep_name
-
-                if name in unbuilt_deps[0][dep_name]:
-                    loop = True
-                    # log(opts.logfile, "schedule2: loop detected: %s <-> %s" % (name, dep_name))
-
-            if loop and len(building_deps[depth][name]) == 0:
-                log(opts.logfile, "schedule2: loop detected, try to build '%s'" % name)
-                return name
-
-            for dep_name in unbuilt_deps[depth][name]:
-                if dep_name in reordered_pkg_names:
-                    # log(opts.logfile, "schedule2: promote %s to work toward %s" % (dep_name, name))
-                    reordered_pkg_names.remove(dep_name)
-                    reordered_pkg_names.insert(0,dep_name)
-
-    # log(opts.logfile, "schedule2: Nothing buildable at this time")
-    return None
-
-
-def read_deps(opts):
-    read_srpm_deps(opts)
-    read_rpm_deps(opts)
-    read_map_deps(opts)
-
-def read_srpm_deps(opts):
-    global srpm_dependencies_direct
-
-    if opts.srpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.srpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.srpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.srpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            srpm_dependencies_direct[name]=deps.split(',')
-
-def read_rpm_deps(opts):
-    global rpm_dependencies_direct
-
-    if opts.rpm_dependency_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_dependency_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_dependency_file)
-        sys.exit(1)
-
-    with open(opts.rpm_dependency_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (name,deps) = line.rstrip().split(';')
-            rpm_dependencies_direct[name]=deps.split(',')
-
-def read_map_deps(opts):
-    global rpm_to_srpm_map
-
-    if opts.rpm_to_srpm_map_file == None:
-        return
-
-    if not os.path.exists(opts.rpm_to_srpm_map_file):
-        log(opts.logfile, "File not found: %s" % opts.rpm_to_srpm_map_file)
-        sys.exit(1)
-
-    with open(opts.rpm_to_srpm_map_file) as f:
-        lines = f.readlines()
-        for line in lines:
-            (rpm,srpm) = line.rstrip().split(';')
-            rpm_to_srpm_map[rpm]=srpm
-
-
-def reaper(opts):
-    global built_pkgs
-    global failed
-    global worker_data
-    global workers
-
-    reaped = 0
-    need_createrepo = False
-    last_reaped = -1
-    while reaped > last_reaped:
-        last_reaped = reaped
-        for wd in worker_data:
-            p = wd['proc']
-            ret = p.exitcode
-            if ret is not None:
-                pkg = wd['pkg']
-                b = int(wd['build_index'])
-                p.join()
-                worker_data.remove(wd)
-                workers = workers - 1
-                reaped = reaped + 1
-                release_build_env(b)
-
-                log(opts.logfile, "End build on 'b%d': %s" % (b, pkg))
-
-                if ret == 0:
-                    failed.append(pkg)
-                    log(opts.logfile, "Error building %s on 'b%d'." % (os.path.basename(pkg), b))
-                    if opts.recurse and not stop_signal:
-                        log(opts.logfile, "Will try to build again (if some other package will succeed).")
-                    else:
-                        log(opts.logfile, "See logs/results in %s" % opts.local_repo_dir)
-                elif ret == 1:
-                    log(opts.logfile, "Success building %s on 'b%d'" % (os.path.basename(pkg), b))
-                    built_pkgs.append(pkg)
-                    need_createrepo = True
-                elif ret == 2:
-                    log(opts.logfile, "Skipping already built pkg %s" % os.path.basename(pkg))
-
-    if need_createrepo:
-        # createrepo with the new pkgs
-        err = createrepo(opts.local_repo_dir)[1]
-        if err.strip():
-            log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-            log(opts.logfile, "Err: %s" % err)
-
-    return reaped
-
-stop_signal = False
-
-def on_terminate(proc):
-    print("process {} terminated with exit code {}".format(proc, proc.returncode))
-
-def kill_proc_and_descentents(parent, need_stop=False, verbose=False):
-    global g_opts
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Stop %d" % parent.pid)
-
-        try:
-            parent.send_signal(signal.SIGSTOP)
-        except:
-            # perhaps mock still running as root, give it a sec to drop pivledges and try again
-            time.sleep(1)
-            parent.send_signal(signal.SIGSTOP)
-
-    try:
-        children = parent.children(recursive=False)
-    except:
-        children = []
-
-    for p in children:
-        kill_proc_and_descentents(p, need_stop=True, verbose=verbose)
-
-    if verbose:
-        log(g_opts.logfile, "Terminate %d" % parent.pid)
-
-    # parent.send_signal(signal.SIGTERM)
-    try:
-        parent.terminate()
-    except:
-        # perhaps mock still running as root, give it a sec to drop pivledges and try again
-        time.sleep(1)
-        parent.terminate()
-
-    if need_stop:
-        if verbose:
-            log(g_opts.logfile, "Continue %d" % parent.pid)
-
-        parent.send_signal(signal.SIGCONT)
-
-
-def child_signal_handler(signum, frame):
-    global g_opts
-    my_pid = os.getpid()
-    # log(g_opts.logfile, "--------- child %d recieved signal %d" % (my_pid, signum))
-    p = psutil.Process(my_pid)
-    kill_proc_and_descentents(p)
-    try:
-        sys.exit(0)
-    except SystemExit as e:
-        os._exit(0)
-
-def signal_handler(signum, frame):
-    global g_opts
-    global stop_signal
-    global workers
-    global worker_data
-    stop_signal = True
-
-    # Signal processes to complete
-    log(g_opts.logfile, "recieved signal %d, Terminating children" % signum)
-    for wd in worker_data:
-        p = wd['proc']
-        ret = p.exitcode
-        if ret is None:
-            # log(g_opts.logfile, "terminate child %d" % p.pid)
-            p.terminate()
-        else:
-            log(g_opts.logfile, "child return code was %d" % ret)
-
-    # Wait for remaining processes to complete
-    log(g_opts.logfile, "===== wait for signaled jobs to complete =====")
-    while len(worker_data) > 0:
-        log(g_opts.logfile, "    remaining workers: %d" % workers)
-        reaped = reaper(g_opts)
-        if reaped == 0:
-            time.sleep(0.1)
-
-    try:
-        sys.exit(1)
-    except SystemExit as e:
-        os._exit(1)
-
-def main(args):
-    opts, args = parse_args(args)
-    # take mock config + list of pkgs
-
-    global g_opts
-    global stop_signal
-    global build_env
-    global worker_data
-    global workers
-    global max_workers
-
-    global slow_pkg_names
-    global slow_pkgs
-    global big_pkg_names
-    global big_pkgs
-    max_workers = int(opts.max_workers)
-
-    global failed
-    global built_pkgs
-
-    cfg = opts.chroot
-    pkgs = args[1:]
-
-    # transform slow/big package options into dictionaries
-    for line in opts.slow_pkg_names_raw:
-        speed,name = line.split(":")
-        if speed != "":
-            slow_pkg_names[name]=int(speed)
-    for line in opts.slow_pkgs_raw:
-        speed,pkg = line.split(":")
-        if speed != "":
-            slow_pkgs[pkg]=int(speed)
-    for line in opts.big_pkg_names_raw:
-        size_gb,name = line.split(":")
-        if size_gb != "":
-            big_pkg_names[name]=int(size_gb)
-    for line in opts.big_pkgs_raw:
-        size_gb,pkg = line.split(":")
-        if size_gb != "":
-            big_pkgs[pkg]=int(size_gb)
-
-    # Set up a mapping between pkg path and pkg name
-    global pkg_to_name
-    global name_to_pkg
-    for pkg in pkgs:
-        if not pkg.endswith('.rpm'):
-            log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-            continue
-
-        try:
-            name = rpmName(pkg)
-        except OSError as e:
-            print("Could not parse rpm %s" % pkg)
-            sys.exit(1)
-
-        pkg_to_name[pkg] = name
-        name_to_pkg[name] = pkg
-
-    read_deps(opts)
-
-    global config_opts
-    config_opts = mock_config.load_config(mockconfig_path, cfg, None, __VERSION__, PKGPYTHONDIR)
-
-    if not opts.tmp_prefix:
-        try:
-            opts.tmp_prefix = os.getlogin()
-        except OSError as e:
-            print("Could not find login name for tmp dir prefix add --tmp_prefix")
-            sys.exit(1)
-    pid = os.getpid()
-    opts.uniqueext = '%s-%s' % (opts.tmp_prefix, pid)
-
-    if opts.basedir != "/var/lib/mock":
-        opts.uniqueext = ''
-
-    # create a tempdir for our local info
-    if opts.localrepo:
-        local_tmp_dir = os.path.abspath(opts.localrepo)
-        if not os.path.exists(local_tmp_dir):
-            os.makedirs(local_tmp_dir)
-            os.chmod(local_tmp_dir, 0o755)
-    else:
-        pre = 'mock-chain-%s-' % opts.uniqueext
-        local_tmp_dir = tempfile.mkdtemp(prefix=pre, dir='/var/tmp')
-        os.chmod(local_tmp_dir, 0o755)
-
-    if opts.logfile:
-        opts.logfile = os.path.join(local_tmp_dir, opts.logfile)
-        if os.path.exists(opts.logfile):
-            os.unlink(opts.logfile)
-
-    log(opts.logfile, "starting logfile: %s" % opts.logfile)
-
-    opts.local_repo_dir = os.path.normpath(local_tmp_dir + '/results/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.local_repo_dir):
-        os.makedirs(opts.local_repo_dir, mode=0o755)
-
-    local_baseurl = "file://%s" % opts.local_repo_dir
-    log(opts.logfile, "results dir: %s" % opts.local_repo_dir)
-    opts.config_path = os.path.normpath(local_tmp_dir + '/configs/' + config_opts['chroot_name'] + '/')
-
-    if not os.path.exists(opts.config_path):
-        os.makedirs(opts.config_path, mode=0o755)
-
-    log(opts.logfile, "config dir: %s" % opts.config_path)
-
-    my_mock_config = os.path.join(opts.config_path, "{0}.cfg".format(config_opts['chroot_name']))
-
-    # modify with localrepo
-    res, msg = add_local_repo(config_opts['config_file'], my_mock_config, local_baseurl, 'local_build_repo')
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    for baseurl in opts.repos:
-        res, msg = add_local_repo(my_mock_config, my_mock_config, baseurl)
-        if not res:
-            log(opts.logfile, "Error: Could not add: %s to yum config in mock chroot: %s" % (baseurl, msg))
-            sys.exit(1)
-
-    res, msg = set_basedir(my_mock_config, my_mock_config, opts.basedir, opts)
-    if not res:
-        log(opts.logfile, "Error: Could not write out local config: %s" % msg)
-        sys.exit(1)
-
-    # these files needed from the mock.config dir to make mock run
-    for fn in ['site-defaults.cfg', 'logging.ini']:
-        pth = mockconfig_path + '/' + fn
-        shutil.copyfile(pth, opts.config_path + '/' + fn)
-
-    # createrepo on it
-    err = createrepo(opts.local_repo_dir)[1]
-    if err.strip():
-        log(opts.logfile, "Error making local repo: %s" % opts.local_repo_dir)
-        log(opts.logfile, "Err: %s" % err)
-        sys.exit(1)
-
-    init_build_env(max_workers, opts, config_opts)
-
-    download_dir = tempfile.mkdtemp()
-    downloaded_pkgs = {}
-    built_pkgs = []
-    try_again = True
-    to_be_built = pkgs
-    return_code = 0
-    num_of_tries = 0
-
-    g_opts = opts
-    signal.signal(signal.SIGTERM, signal_handler)
-    signal.signal(signal.SIGINT, signal_handler)
-    signal.signal(signal.SIGHUP, signal_handler)
-    signal.signal(signal.SIGABRT, signal_handler)
-
-    while try_again and not stop_signal:
-        num_of_tries += 1
-        failed = []
-
-        log(opts.logfile, "===== iteration %d start =====" % num_of_tries)
-
-        to_be_built_scheduled = to_be_built[:]
-
-        need_reap = False
-        while len(to_be_built_scheduled) > 0:
-            # Free up a worker
-            while need_reap or workers >= max_workers:
-                need_reap = False
-                reaped = reaper(opts)
-                if reaped == 0:
-                    time.sleep(0.1)
-
-            if workers < max_workers:
-                workers = workers + 1
-
-                b = get_idle_build_env(max_workers)
-                if b < 0:
-                    log(opts.logfile, "Failed to find idle build env for: %s" % pkg)
-                    workers = workers - 1
-                    need_reap = True
-                    continue
-
-                pkg = schedule(b, to_be_built_scheduled, opts)
-                if pkg is None:
-                    if workers <= 1:
-                        # Remember we have one build environmnet reserved, so can't test for zero workers
-                        log(opts.logfile, "failed to schedule from: %s" % to_be_built_scheduled)
-                        pkg = to_be_built_scheduled[0]
-                        log(opts.logfile, "All workers idle, forcing build of pkg=%s" % pkg)
-                    else:
-                        release_build_env(b)
-                        workers = workers - 1
-                        need_reap = True
-                        continue
-
-                to_be_built_scheduled.remove(pkg)
-
-                if not pkg.endswith('.rpm'):
-                    log(opts.logfile, "%s doesn't appear to be an rpm - skipping" % pkg)
-                    failed.append(pkg)
-                    release_build_env(b)
-                    need_reap = True
-                    continue
-
-                elif pkg.startswith('http://') or pkg.startswith('https://') or pkg.startswith('ftp://'):
-                    url = pkg
-                    try:
-                        log(opts.logfile, 'Fetching %s' % url)
-                        r = requests.get(url)
-                        # pylint: disable=no-member
-                        if r.status_code == requests.codes.ok:
-                            fn = urlsplit(r.url).path.rsplit('/', 1)[1]
-                            if 'content-disposition' in r.headers:
-                                _, params = cgi.parse_header(r.headers['content-disposition'])
-                                if 'filename' in params and params['filename']:
-                                    fn = params['filename']
-                            pkg = download_dir + '/' + fn
-                            with open(pkg, 'wb') as fd:
-                                for chunk in r.iter_content(4096):
-                                    fd.write(chunk)
-                    except Exception as e:
-                        log(opts.logfile, 'Error Downloading %s: %s' % (url, str(e)))
-                        failed.append(url)
-                        release_build_env(b)
-                        need_reap = True
-                        continue
-                    else:
-                        downloaded_pkgs[pkg] = url
-
-                log(opts.logfile, "Start build on 'b%d': %s" % (b, pkg))
-                # ret = do_build(opts, config_opts['chroot_name'], pkg)[0]
-                p = multiprocessing.Process(target=do_build, args=(opts, build_env[b]['cfg'], pkg))
-                worker_data.append({'proc': p, 'pkg': pkg, 'build_index': int(b)})
-                p.start()
-
-        # Wait for remaining processes to complete
-        log(opts.logfile, "===== wait for last jobs in iteration %d to complete =====" % num_of_tries)
-        while workers > 0:
-            reaped = reaper(opts)
-            if reaped == 0:
-                time.sleep(0.1)
-        log(opts.logfile, "===== iteration %d complete =====" % num_of_tries)
-
-        if failed and opts.recurse:
-            log(opts.logfile, "failed=%s" % failed)
-            log(opts.logfile, "to_be_built=%s" % to_be_built)
-            if len(failed) != len(to_be_built):
-                to_be_built = failed
-                try_again = True
-                log(opts.logfile, 'Some package succeeded, some failed.')
-                log(opts.logfile, 'Trying to rebuild %s failed pkgs, because --recurse is set.' % len(failed))
-            else:
-                if max_workers > 1:
-                    max_workers = 1
-                    to_be_built = failed
-                    try_again = True
-                    log(opts.logfile, 'Some package failed under parallel build.')
-                    log(opts.logfile, 'Trying to rebuild %s failed pkgs with single thread, because --recurse is set.' % len(failed))
-                else:
-                    log(opts.logfile, "")
-                    log(opts.logfile, "*** Build Failed ***")
-                    log(opts.logfile, "Tried %s times - following pkgs could not be successfully built:" % num_of_tries)
-                    log(opts.logfile, "*** Build Failed ***")
-                    for pkg in failed:
-                        msg = pkg
-                        if pkg in downloaded_pkgs:
-                            msg = downloaded_pkgs[pkg]
-                        log(opts.logfile, msg)
-                    log(opts.logfile, "")
-                    try_again = False
-        else:
-            try_again = False
-            if failed:
-                return_code = 2
-
-    # cleaning up our download dir
-    shutil.rmtree(download_dir, ignore_errors=True)
-
-    log(opts.logfile, "")
-    log(opts.logfile, "Results out to: %s" % opts.local_repo_dir)
-    log(opts.logfile, "")
-    log(opts.logfile, "Pkgs built: %s" % len(built_pkgs))
-    if built_pkgs:
-        if failed:
-            if len(built_pkgs):
-                log(opts.logfile, "Some packages successfully built in this order:")
-        else:
-            log(opts.logfile, "Packages successfully built in this order:")
-        for pkg in built_pkgs:
-            log(opts.logfile, pkg)
-    return return_code
-
-
-if __name__ == "__main__":
-    sys.exit(main(sys.argv))
diff --git a/build-tools/modify-build-cfg b/build-tools/modify-build-cfg
deleted file mode 100755
index 6c273f79..00000000
--- a/build-tools/modify-build-cfg
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/bin/sh
-
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-# This script modifies a mock configuration file (typically $MY_BUILD_CFG)
-# to add build time environment variables to the mock environment (things
-# like what branch we're building on, etc).
-#
-# For reasons of security, the host environment variables cannot normally be
-# passed through to the mock environment, so this scripts sets the variables
-# to literal values.
-#
-# usage: modify-build-cfg [file.cfg] [<layer>]
-#
-
-MODIFY_BUILD_CFG_DIR="$(dirname "$(readlink -f "${BASH_SOURCE[0]}" )" )"
-
-# Set PKG_MANAGER for our build environment.
-source "${MODIFY_BUILD_CFG_DIR}/pkg-manager-utils.sh"
-
-LAYER=${2:-$LAYER}
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-# Preferred python
-if rpm -q --whatprovides --quiet python3; then
-    PYTHON_PKG=python3
-else
-    PYTHON_PKG=python2
-fi
-
-# Try to find a layer specific mock.cfg.proto
-MOCK_CFG_PROTO="${CENTOS_REPO}/mock.cfg.${LAYER}.proto"
-echo "==== Try MOCK_CFG_PROTO=$MOCK_CFG_PROTO ===="
-if [ ! -f "$MOCK_CFG_PROTO" ]; then
-    # Not present, Use default mock.cfg.proto
-    MOCK_CFG_PROTO="${CENTOS_REPO}/mock.cfg.proto"
-fi
-
-echo "==== Use MOCK_CFG_PROTO=$MOCK_CFG_PROTO ===="
-if [ ! -f "$MOCK_CFG_PROTO" ]; then
-   echo "ERROR: Couldn't find mock config prototype at '$MOCK_CFG_PROTO'"
-   exit 1
-fi
-
-if [ "${1}x" == "x" ]; then
-	FILE=$MY_BUILD_CFG
-else
-	FILE=$1
-fi
-
-if [ -f $MOCK_CFG_PROTO ]; then
-   if [ -f $FILE ]; then
-      NEWER=$(find "$MOCK_CFG_PROTO" -newer "$FILE")
-      if [ "x$NEWER" != "x" ]; then
-         \rm -f -v "$FILE"
-      fi
-   fi
-fi
-
-if [ ! -f $FILE ]; then
-   if [ -z $MY_BUILD_ENVIRONMENT ] || [ -z $MY_BUILD_DIR ] || [ -z $MY_REPO ]; then
-      echo "Can't create $FILE without MY_BUILD_ENVIRONMENT, MY_BUILD_DIR and MY_REPO environment variables"
-      exit 1
-   fi
-
-   echo "Recreating $FILE"
-   \cp -f -v "$MOCK_CFG_PROTO" "$FILE"
-   if [ $? -ne 0 ]; then
-      echo "Couldn't find config file '$FILE', nor construct it from '$MOCK_CFG_PROTO'"
-      exit 1
-   fi
-
-   # eg: LOCAL_BASE/MY_BUILD_DIR => http://127.0.0.1:8088/MY_BUILD_DIR
-   sed -i "s%LOCAL_BASE%http://127.0.0.1:8088%g"  "$FILE"
-   sed -i "s%MIRROR_BASE%http://127.0.0.1:8088%g" "$FILE"
-   sed -i "s%BUILD_ENV%$MY_BUILD_ENVIRONMENT%g"   "$FILE"
-   # eg http://127.0.0.1:8088/MY_BUILD_DIR => http://12.0.0.1:8088/localdisk/loadbuild/...
-   sed -i "s%/MY_BUILD_DIR%$MY_BUILD_DIR_TOP%g"   "$FILE"
-   sed -i "s%/MY_REPO_DIR%$MY_REPO%g"             "$FILE"
-   # eg = MY_BUILD_DIR/xyz => /localdisk/loadbuild/.../xyz
-   sed -i "s%MY_BUILD_DIR%$MY_BUILD_DIR_TOP%g"    "$FILE"
-   sed -i "s%MY_REPO_DIR%$MY_REPO%g"              "$FILE"
-
-   # Disable all local-* repos for the build-types other than the current one
-   for bt in std rt; do
-      if [ "$bt" != "$BUILD_TYPE" ]; then
-         # Use the range of lines starting with pattern [local-$bt] until the next line starting with []
-         sed -i "/^\[local-$bt\]/,/^\[/ s/enabled=1/enabled=0/" $FILE
-         sed -i "/^\[StxCentos7Distro-$bt\]/,/^\[/ s/enabled=1/enabled=0/" $FILE
-      fi
-   done
-fi
-
-
-# Add environment variables to mock config if they don't exist
-grep -q "config_opts\['environment'\]\['BUILD_BY'\]" $FILE || \
-    echo "config_opts['environment']['BUILD_BY']" >> $FILE
-
-grep -q "config_opts\['environment'\]\['BUILD_DATE'\]" $FILE || \
-    echo "config_opts['environment']['BUILD_DATE']" >> $FILE
-
-grep -q "config_opts\['environment'\]\['REPO'\]" $FILE || \
-    echo "config_opts['environment']['REPO']" >> $FILE
-
-grep -q "config_opts\['environment'\]\['WRS_GIT_BRANCH'\]" $FILE || \
-    echo "config_opts['environment']['WRS_GIT_BRANCH']" >> $FILE
-
-grep -q "config_opts\['environment'\]\['CGCS_GIT_BRANCH'\]" $FILE || \
-    echo "config_opts['environment']['CGCS_GIT_BRANCH']" >> $FILE
-
-if [ -z $FORMAL_BUILD ]; then
-    grep -q "config_opts\['macros'\]\['%_no_cgcs_license_check'\] = '1'" $FILE || \
-	echo "config_opts['macros']['%_no_cgcs_license_check'] = '1'" >> $FILE
-else
-    sed -i "/config_opts\['macros'\]\['%_no_cgcs_license_check'\] = '1'/d" $FILE
-fi
-
-grep -q "config_opts\['macros'\]\['%_tis_build_type'\] = '$BUILD_TYPE'" $FILE || \
-    echo "config_opts['macros']['%_tis_build_type'] = '$BUILD_TYPE'" >> $FILE
-
-if [ -f /usr/lib64/nosync/nosync.so ]; then
-    grep -q "config_opts\['nosync'\] = True" $FILE || \
-        echo "config_opts['nosync'] = True" >> $FILE
-fi
-
-NETWORK_PKGS=""
-if [ "containers" == "$BUILD_TYPE" ]; then
-    NETWORK_PKGS="bind-utils"
-fi
-
-BUILD_PKGS=''
-if [ "${PKG_MANAGER}" == "yum" ]; then
-    BUILD_PKGS='@buildsys-build'
-elif [ "${PKG_MANAGER}" == "dnf" ]; then
-    # buildsys-build group was dropped when Centos-8 switched to dnf.
-    # We must list all the members plus a few new ones (fedpkg-minimal, epel-rpm-macros).
-    BUILD_PKGS='bash bzip2 coreutils cpio diffutils epel-release epel-rpm-macros fedpkg-minimal findutils gawk gcc gcc-c++ grep gzip info make patch redhat-rpm-config redhat-release rpm-build sed shadow-utils tar unzip util-linux which xz'
-fi
-
-STX_PKGS='pigz lbzip2 bash'
-
-PKGS="${BUILD_PKGS} ${STX_PKGS} ${PKG_MANAGER} ${PYTHON_PKG} ${NETWORK_PKGS}"
-
-grep -q "config_opts\['chroot_setup_cmd'\] = 'install ${PKGS}'" $FILE || \
-    echo "config_opts['chroot_setup_cmd'] = 'install ${PKGS}'" >> $FILE
-
-# Special case for containers.
-# rpmbuild_networking is required for invoking helm commands within mock
-# building containers requires the std repo to be enabled.
-if [ "containers" == "$BUILD_TYPE" ]; then
-    grep -q "config_opts\['rpmbuild_networking'\] = True" $FILE || \
-        echo "config_opts['rpmbuild_networking'] = True" >> $FILE
-
-    grep -q "config_opts\['use_host_resolv'\] = True" $FILE || \
-        echo "config_opts['use_host_resolv'] = True" >> $FILE
-
-   sed -i "/^\[local-std\]/,/^\[/ s/enabled=0/enabled=1/" $FILE
-fi
-
-#
-# Read macros from tis.macros to add to the build config file,
-# for use in RPM spec files
-#
-RPM_MACROS=$MY_REPO/build-tools/tis.macros
-sed 's/#.*//' $RPM_MACROS | grep '=' | while IFS='=' read name value; do
-    # Check if the entry already exists. If so, go to next line
-    grep -q "^config_opts\['macros'\]\['${name}'\] = '${value}'$" $FILE && continue
-
-    # Update or add the entry
-    grep -q "^config_opts\['macros'\]\['${name}'\]" $FILE
-    if [ $? -eq 0 ]; then
-        sed -i -r "s#^(config_opts\['macros'\]\['${name}'\]).*#\1 = '${value}'#" $FILE
-    else
-        echo "config_opts['macros']['${name}'] = '${value}'" >> $FILE
-    fi
-done
-
-# okay, now we have lines for each env var.  Generate the correct values
-
-BUILD_DATE=`date "+%F %T %z"`
-CGCS_GIT_BRANCH=`cd $MY_REPO/stx/; git rev-parse --abbrev-ref HEAD`
-WRS_GIT_BRANCH=`cd $MY_REPO; git rev-parse --abbrev-ref HEAD`
-REPO=$MY_REPO
-
-# Finally, our good friend sed will place the values in the mock config file
-sed -i \
-    -e "s#config_opts\['environment'\]\['BUILD_BY'\].*#config_opts\['environment'\]\['BUILD_BY'\] = '$USER'#" \
-    -e "s#config_opts\['environment'\]\['BUILD_DATE'\].*#config_opts\['environment'\]\['BUILD_DATE'\] = '$BUILD_DATE'#" \
-    -e "s#config_opts\['environment'\]\['REPO'\].*#config_opts\['environment'\]\['REPO'\] = '$REPO'#" \
-    -e "s#config_opts\['environment'\]\['WRS_GIT_BRANCH'\].*#config_opts\['environment'\]\['WRS_GIT_BRANCH'\] = '$WRS_GIT_BRANCH'#" \
-    -e "s#config_opts\['environment'\]\['CGCS_GIT_BRANCH'\].*#config_opts\['environment'\]\['CGCS_GIT_BRANCH'\] = '$CGCS_GIT_BRANCH'#" \
-    $FILE
diff --git a/build-tools/patch-iso b/build-tools/patch-iso
deleted file mode 100755
index e73a3b90..00000000
--- a/build-tools/patch-iso
+++ /dev/null
@@ -1,427 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2018-2020 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-# Utility for adding patches to an unpatched ISO
-#
-
-source "$(dirname $0)/image-utils.sh"
-
-if [ -z "${MY_REPO}" ]; then
-    echo "Required environment variable MY_REPO is not set"
-    exit 1
-fi
-
-if [ -z "${MY_WORKSPACE}" ]; then
-    echo "Required environment variable MY_WORKSPACE is not set"
-    exit 1
-fi
-
-STX_DIR=${MY_REPO}/stx
-SETUP_PATCH_REPO=${STX_DIR}/update/extras/scripts/setup_patch_repo.sh
-if [ ! -x ${SETUP_PATCH_REPO} ]; then
-    echo "Cannot find or execute ${SETUP_PATCH_REPO}"
-    exit 1
-fi
-
-# Create temp dir if necessary
-export TMPDIR="$MY_WORKSPACE/tmp"
-mkdir -p $TMPDIR
-
-REPO_UPGRADES_DIR=${STX_DIR}/metal/bsp-files/upgrades
-RELEASE_INFO="$(get_release_info)"
-
-if [ $? -ne 0 ]; then
-   echo "ERROR: failed to find a release info file."
-   exit 1
-fi
-
-PLATFORM_RELEASE=$(source $RELEASE_INFO && echo $PLATFORM_RELEASE)
-
-function usage() {
-    echo ""
-    echo "Usage: "
-    echo "   $(basename $0) -i <input bootimage.iso> -o <output bootimage.iso> [ -u ] <patch> ..."
-    echo "        -i <file>: Specify input ISO file"
-    echo "        -o <file>: Specify output ISO file"
-    echo "        -u       : Update with upgrades files from ${REPO_UPGRADES_DIR}"
-    echo ""
-}
-
-function extract_pkg_from_patch_repo() {
-   local repodir=${BUILDDIR}/patches
-   local pkgname=$1
-   local pkgfile=$(repoquery --disablerepo=* --repofrompath local,${repodir} --enablerepo=local --location -q ${pkgname})
-   if [ -z "${pkgfile}" ]; then
-      return 1
-   fi
-
-   rpm2cpio ${pkgfile/file://} | cpio -idmv
-   if [ $? -ne 0 ]; then
-      echo "Failed to extract $pkgname files from ${pkgfile/file://}"
-      exit 1
-   fi
-}
-
-declare INPUT_ISO=
-declare OUTPUT_ISO=
-declare ORIG_PWD=$PWD
-declare DO_UPGRADES=1
-
-while getopts "i:o:u" opt; do
-    case $opt in
-        i)
-            INPUT_ISO=$OPTARG
-            ;;
-        o)
-            OUTPUT_ISO=$OPTARG
-            ;;
-        u)
-            DO_UPGRADES=0
-            ;;
-        *)
-            usage
-            exit 1
-            ;;
-    esac
-done
-
-if [ -z "$INPUT_ISO" -o -z "$OUTPUT_ISO" ]; then
-    usage
-    exit 1
-fi
-
-if [ ! -f ${INPUT_ISO} ]; then
-    echo "Input file does not exist: ${INPUT_ISO}"
-    exit 1
-fi
-
-if [ -f ${OUTPUT_ISO} ]; then
-    echo "Output file already exists: ${OUTPUT_ISO}"
-    exit 1
-fi
-
-shift $((OPTIND-1))
-
-if [ $# -le 0 ]; then
-    usage
-    exit
-fi
-
-for pf in $@; do
-    if [ ! -f $pf ]; then
-        echo "Patch file $pf does not exist"
-        exit 1
-    fi
-
-    if [[ ! $pf =~ \.patch$ ]]; then
-        echo "Specified file $pf does not have .patch extension"
-        exit 1
-    fi
-done
-
-declare MNTDIR=
-declare BUILDDIR=
-declare WORKDIR=
-
-function check_requirements {
-    local -a required_utils=(
-        rsync
-        mkisofs
-        isohybrid
-        implantisomd5
-    )
-    if [ $UID -ne 0 ]; then
-        # If running as non-root user, additional utils are required
-        required_utils+=(
-            guestmount
-            guestunmount
-        )
-    fi
-
-    local -i missing=0
-
-    for req in ${required_utils[@]}; do
-        which ${req} >&/dev/null
-        if [ $? -ne 0 ]; then
-            echo "Unable to find required utility: ${req}" >&2
-            let missing++
-        fi
-    done
-
-    if [ ${missing} -gt 0 ]; then
-        echo "One or more required utilities are missing. Aborting..." >&2
-        exit 1
-    fi
-}
-
-function mount_iso {
-    if [ $UID -eq 0 ]; then
-        # Mount the ISO
-        mount -o loop ${INPUT_ISO} ${MNTDIR}
-        if [ $? -ne 0 ]; then
-            echo "Failed to mount ${INPUT_ISO}" >&2
-            exit 1
-        fi
-    else
-        # As non-root user, mount the ISO using guestmount
-        guestmount -a ${INPUT_ISO} -m /dev/sda1 --ro ${MNTDIR}
-        rc=$?
-        if [ $rc -ne 0 ]; then
-            # Add a retry
-            echo "Call to guestmount failed with rc=$rc. Retrying once..."
-
-            guestmount -a ${INPUT_ISO} -m /dev/sda1 --ro ${MNTDIR}
-            rc=$?
-            if [ $rc -ne 0 ]; then
-                echo "Call to guestmount failed with rc=$rc. Aborting..."
-                exit $rc
-            fi
-        fi
-    fi
-}
-
-function unmount_iso {
-    if [ $UID -eq 0 ]; then
-        umount ${MNTDIR}
-    else
-        guestunmount ${MNTDIR}
-    fi
-    rmdir ${MNTDIR}
-}
-
-function cleanup() {
-    if [ -n "$MNTDIR" -a -d "$MNTDIR" ]; then
-        unmount_iso
-    fi
-
-    if [ -n "$BUILDDIR" -a -d "$BUILDDIR" ]; then
-        \rm -rf $BUILDDIR
-    fi
-
-    if [ -n "$WORKDIR" -a -d "$WORKDIR" ]; then
-        \rm -rf $WORKDIR
-    fi
-}
-
-trap cleanup EXIT
-
-MNTDIR=$(mktemp -d -p $PWD patchiso_mnt_XXXXXX)
-if [ -z "${MNTDIR}" -o ! -d ${MNTDIR} ]; then
-    echo "Failed to create mntdir. Aborting..."
-    exit $rc
-fi
-
-BUILDDIR=$(mktemp -d -p $PWD patchiso_build_XXXXXX)
-if [ -z "${BUILDDIR}" -o ! -d ${BUILDDIR} ]; then
-    echo "Failed to create builddir. Aborting..."
-    exit $rc
-fi
-
-# Mount the ISO
-mount_iso
-
-rsync -a ${MNTDIR}/ ${BUILDDIR}/
-rc=$?
-if [ $rc -ne 0 ]; then
-    echo "Call to rsync ISO content. Aborting..."
-    exit $rc
-fi
-
-unmount_iso
-
-# Setup the patch repo
-${SETUP_PATCH_REPO} -o ${BUILDDIR}/patches $@
-rc=$?
-if [ $rc -ne 0 ]; then
-    echo "Call to $(basename ${SETUP_PATCH_REPO}) failed with rc=$rc. Aborting..."
-    exit $rc
-fi
-
-# Look for components that need modification
-#extract_pkg_from_patch_repo
-WORKDIR=$(mktemp -d -p $PWD patchiso_work_XXXXXX)
-if [ -z "${WORKDIR}" -o ! -d ${WORKDIR} ]; then
-    echo "Failed to create workdir. Aborting..."
-    exit $rc
-fi
-
-\cd ${WORKDIR}
-\mkdir extract
-\cd extract
-
-# Changes to copied files here must also be reflected in build-iso
-
-extract_pkg_from_patch_repo platform-kickstarts
-if [ $? -eq 0 ]; then
-    # Replace files
-    \rm -f ${BUILDDIR}/*ks.cfg &&
-    \cp --preserve=all var/www/pages/feed/rel-*/*.cfg ${BUILDDIR}/ &&
-    \cp --preserve=all ${BUILDDIR}/controller_ks.cfg ${BUILDDIR}/ks.cfg
-    if [ $? -ne 0 ]; then
-        echo "Failed to copy extracted kickstarts"
-        exit 1
-    fi
-fi
-\cd ${WORKDIR}
-\rm -rf extract
-
-\mkdir extract
-\cd extract
-extract_pkg_from_patch_repo platform-kickstarts-pxeboot
-if [ $? -eq 0 ]; then
-    # Replace files
-    \rm -f ${BUILDDIR}/var/pxeboot/pxeboot_controller.cfg \
-        ${BUILDDIR}/var/pxeboot/pxeboot_smallsystem.cfg \
-        ${BUILDDIR}/var/pxeboot/pxeboot_smallsystem_lowlatency.cfg &&
-    \cp --preserve=all pxeboot/* ${BUILDDIR}/var/pxeboot/
-    if [ $? -ne 0 ]; then
-        echo "Failed to copy extracted pxeboot kickstarts"
-        exit 1
-    fi
-fi
-\cd ${WORKDIR}
-\rm -rf extract
-
-\mkdir extract
-\cd extract
-extract_pkg_from_patch_repo pxe-network-installer
-if [ $? -eq 0 ]; then
-    # Replace files
-    \rm -f ${BUILDDIR}/pxeboot/pxelinux.0 \
-        ${BUILDDIR}/pxeboot/menu.c32 \
-        ${BUILDDIR}/pxeboot/chain.c32 &&
-    \cp --preserve=all var/pxeboot/pxelinux.0 var/pxeboot/menu.c32 var/pxeboot/chain.c32 ${BUILDDIR}/pxeboot/
-    if [ $? -ne 0 ]; then
-        echo "Error: Could not copy all files from installer"
-        exit 1
-    fi
-
-    \rm -f ${BUILDDIR}/LiveOS/squashfs.img &&
-    \cp --preserve=all var/www/pages/feed/rel-*/LiveOS/squashfs.img ${BUILDDIR}/LiveOS/
-    if [ $? -ne 0 ]; then
-        echo "Error: Could not copy squashfs from LiveOS"
-        exit 1
-    fi
-
-    # Replace vmlinuz and initrd.img with our own pre-built ones
-    \rm -f \
-        ${BUILDDIR}/vmlinuz \
-        ${BUILDDIR}/images/pxeboot/vmlinuz \
-        ${BUILDDIR}/initrd.img \
-        ${BUILDDIR}/images/pxeboot/initrd.img &&
-    \cp --preserve=all var/pxeboot/rel-*/installer-bzImage_1.0 \
-        ${BUILDDIR}/vmlinuz &&
-    \cp --preserve=all var/pxeboot/rel-*/installer-bzImage_1.0 \
-        ${BUILDDIR}/images/pxeboot/vmlinuz &&
-    \cp --preserve=all var/pxeboot/rel-*/installer-intel-x86-64-initrd_1.0 \
-        ${BUILDDIR}/initrd.img &&
-    \cp --preserve=all var/pxeboot/rel-*/installer-intel-x86-64-initrd_1.0 \
-        ${BUILDDIR}/images/pxeboot/initrd.img
-    if [ $? -ne 0 ]; then
-        echo "Error: Failed to copy installer images"
-        exit 1
-    fi
-fi
-\cd ${WORKDIR}
-\rm -rf extract
-
-\mkdir extract
-\cd extract
-extract_pkg_from_patch_repo grub2-efi-x64-pxeboot
-if [ $? -eq 0 ]; then
-    # Replace files
-    \rm -f ${BUILDDIR}/var/pxeboot/EFI/grubx64.efi &&
-    \cp --preserve=all pxeboot/EFI/grubx64.efi ${BUILDDIR}/var/pxeboot/EFI/
-    if [ $? -ne 0 ]; then
-        echo "Error: Failed to copy grub2-efi-x64-pxeboot files"
-        exit 1
-    fi
-fi
-\cd ${WORKDIR}
-\rm -rf extract
-
-\mkdir extract
-\cd extract
-extract_pkg_from_patch_repo grub2-common
-if [ $? -eq 0 ]; then
-    # Replace files
-    for f in usr/lib/grub/x86_64-efi/*; do
-        f_base=$(basename $f)
-        \rm -f ${BUILDDIR}/var/pxeboot/EFI/$f_base &&
-        \cp --preserve=all ${f} ${BUILDDIR}/var/pxeboot/EFI/
-        if [ $? -ne 0 ]; then
-            echo "Error: Failed to copy grub2-common files"
-            exit 1
-        fi
-    done
-fi
-\cd ${WORKDIR}
-\rm -rf extract
-
-\mkdir extract
-\cd extract
-extract_pkg_from_patch_repo grub2-efi-x64-modules
-if [ $? -eq 0 ]; then
-    # Replace files
-    for f in usr/lib/grub/x86_64-efi/*; do
-        f_base=$(basename $f)
-        \rm -f ${BUILDDIR}/var/pxeboot/EFI/$f_base &&
-        \cp --preserve=all ${f} ${BUILDDIR}/var/pxeboot/EFI/
-        if [ $? -ne 0 ]; then
-            echo "Error: Failed to copy grub2-efi-x64-modules files"
-            exit 1
-        fi
-    done
-fi
-\cd ${WORKDIR}
-\rm -rf extract
-
-\cd ${ORIG_PWD}
-
-if [ ${DO_UPGRADES} -eq 0 ]; then
-    # Changes to copied files here must also be reflected in build-iso
-
-    echo "Updating upgrade support files"
-    ISO_UPGRADES_DIR="${BUILDDIR}/upgrades"
-    \rm -rf ${ISO_UPGRADES_DIR}
-    \mkdir ${ISO_UPGRADES_DIR}
-    \cp ${REPO_UPGRADES_DIR}/* ${ISO_UPGRADES_DIR}
-    sed -i "s/xxxSW_VERSIONxxx/${PLATFORM_RELEASE}/g" ${ISO_UPGRADES_DIR}/metadata.xml
-    chmod +x ${ISO_UPGRADES_DIR}/*.sh
-    # Write the version out (used in upgrade scripts - this is the same as SW_VERSION)
-    echo "VERSION=$PLATFORM_RELEASE" > ${ISO_UPGRADES_DIR}/version
-fi
-
-# Rebuild the ISO
-mkisofs -o ${OUTPUT_ISO} \
-    -R -D -A 'oe_iso_boot' -V 'oe_iso_boot' \
-    -quiet \
-    -b isolinux.bin -c boot.cat -no-emul-boot \
-    -boot-load-size 4 -boot-info-table \
-    -eltorito-alt-boot \
-    -e images/efiboot.img \
-    -no-emul-boot \
-    ${BUILDDIR}
-
-isohybrid --uefi ${OUTPUT_ISO}
-implantisomd5 ${OUTPUT_ISO}
-
-# Sign the .iso with the developer private key
-# Signing with the formal key is only to be done for customer release
-# and is a manual step afterwards, as with the GA ISO
-openssl dgst -sha256 \
-    -sign ${MY_REPO}/build-tools/signing/dev-private-key.pem \
-    -binary \
-    -out ${OUTPUT_ISO/%.iso/.sig} \
-    ${OUTPUT_ISO}
-rc=$?
-if [ $rc -ne 0 ]; then
-    echo "Call to $(basename ${SETUP_PATCH_REPO}) failed with rc=$rc. Aborting..."
-    exit $rc
-fi
-
-echo "Patched ISO: ${OUTPUT_ISO}"
-
diff --git a/build-tools/patch_rebase_1 b/build-tools/patch_rebase_1
deleted file mode 100755
index 4105a731..00000000
--- a/build-tools/patch_rebase_1
+++ /dev/null
@@ -1,140 +0,0 @@
-#!/bin/bash
-
-#
-# Start an edit session for packages to be upgraded - pre upgrade version
-#
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-usage () {
-    echo ""
-    echo "Step 1: Start an edit session for packages to be upgraded - pre upgrade version"
-    echo ""
-    echo "Usage: "
-    echo "   patch_rebase_1 [--origin_branch <branch>] [--working_branch <branch>] [--upversion_data <file>]"
-    echo ""
-    echo "Assumes $(basename ${CENTOS_REPO}) already has a working_branch commit that sets the new symlinks."
-    echo ""
-    echo "The upversion_data file has data on all the src.rpm being updated in the format:"
-    echo "  export UPVERSION_DATA=$MY_WORKSPACE/upversion.log"
-    echo "  PKG=lighttpd"
-    echo "  OLD_SRC_RPM=lighttpd-1.4.41-1.el7.src.rpm"
-    echo "  NEW_SRC_RPM=lighttpd-1.4.41-2.el7.src.rpm"
-    echo "  SRPM_PATH=$MY_REPO/stx/integ/extended/lighttpd/centos/srpm_path"
-    echo "  echo \"\$PKG#\$SRPM_PATH##\$OLD_SRC_RPM#\$NEW_SRC_RPM\" > UPVERSION_DATA"
-    echo ""
-}
-
-TEMP=`getopt -o h --long origin_branch:,working_branch:,upversion_data:,help -n 'test.sh' -- "$@"`
-eval set -- "$TEMP"
-
-ORIGIN_BRANCH=""
-WORKING_BRANCH=""
-UPVERSION_LOG=""
-HELP=0
-
-while true ; do
-    case "$1" in
-        --origin_branch)  shift ; ORIGIN_BRANCH="$1" ; shift ;;
-        --working_branch) shift ; WORKING_BRANCH="$1" ; shift ;;
-        --upversion_data) shift ; UPVERSION_LOG="$1" ; shift ;;
-        -h|--help)        HELP=1 ; shift ;;
-        --)               shift ; break ;;
-        *)                usage; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   UPVERSION_LOG=$UPVERSION_DATA
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   echo "ERROR: please specify location of upversion data"
-   usage
-   exit 1
-fi
-
-if [ ! -f "$UPVERSION_LOG" ]; then
-   echo "File not found: '$UPVERSION_LOG'"
-   exit 1
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$PATCH_SOURCE_BRANCH
-   WORKING_BRANCH=$MY_PATCH_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$SOURCE_BRANCH
-   WORKING_BRANCH=$MY_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ]; then
-   echo "ERROR: please specify a origin branch"
-   usage
-   exit 1
-fi
-
-if [ "$WORKING_BRANCH" == "" ]; then
-   echo "ERROR: please specify a working branch"
-   usage
-   exit 1
-fi
-
-# One step back to see the old symlinks
-cd ${CENTOS_REPO}
-git checkout $WORKING_BRANCH
-if [ $? != 0 ]; then
-   echo "ERROR: Can't checkout branch '$WORKING_BRANCH' in directory '$(pwd)'"
-   exit 1
-fi
-
-git checkout HEAD^
-
-FAILED=""
-for dat in $(cat $UPVERSION_LOG); do
-   name=$(echo $dat | awk -F '#' '{print $1}')
-   srpm_path=$(echo $dat | awk -F '#' '{print $2}')
-   old_src_rpm=$(echo $dat | awk -F '#' '{print $4}')
-   new_src_rpm=$(echo $dat | awk -F '#' '{print $5}')
-
-   echo "$name  $old_src_rpm  $new_src_rpm"
-
-   build-pkgs --edit --clean $name
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed cmd 'build-pkgs --edit --clean $name'"
-      FAILED="$name $FAILED"
-      break
-   fi
-   echo "$?   <<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<<"
-   build-pkgs --edit $name
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed cmd 'build-pkgs --edit $name'"
-      FAILED="$name $FAILED"
-      break
-   fi
-   echo "$?   <=<=<=<=<=<=<=<=<=<=<=<=<=<=<=<="
-done
-
-cd ${CENTOS_REPO}
-git checkout $WORKING_BRANCH
-
-if [ "$FAILED" != "" ]; then
-   echo "Failed build-pkgs --edit for ... $FAILED"
-   exit 1
-fi
-
-
diff --git a/build-tools/patch_rebase_2 b/build-tools/patch_rebase_2
deleted file mode 100755
index 0e575314..00000000
--- a/build-tools/patch_rebase_2
+++ /dev/null
@@ -1,158 +0,0 @@
-#!/bin/bash
-
-#
-# Update srpm_path for packages to be upgraded
-#
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-usage () {
-    echo ""
-    echo "Step 2: Update srpm_path for packages to be upgraded"
-    echo ""
-    echo "Usage: "
-    echo "   patch_rebase_2 [--origin_branch <branch>] [--working_branch <branch>] [--upversion_data <file>]"
-    echo ""
-    echo "Assumes $(basename ${CENTOS_REPO}) already has a working_branch commit that sets the new symlinks."
-    echo ""
-    echo "The upversion_data file has data on all the src.rpm being updated in the format:"
-    echo "  export UPVERSION_DATA=$MY_WORKSPACE/upversion.log"
-    echo "  PKG=lighttpd"
-    echo "  OLD_SRC_RPM=lighttpd-1.4.41-1.el7.src.rpm"
-    echo "  NEW_SRC_RPM=lighttpd-1.4.41-2.el7.src.rpm"
-    echo "  SRPM_PATH=$MY_REPO/stx/integ/extended/lighttpd/centos/srpm_path"
-    echo "  echo \"\$PKG#\$SRPM_PATH##\$OLD_SRC_RPM#\$NEW_SRC_RPM\" > UPVERSION_DATA"
-    echo ""
-}
-
-
-TEMP=`getopt -o h --long origin_branch:,working_branch:,upversion_data:,help -n 'test.sh' -- "$@"`
-eval set -- "$TEMP"
-
-ORIGIN_BRANCH=""
-WORKING_BRANCH=""
-UPVERSION_LOG=""
-HELP=0
-
-while true ; do
-    case "$1" in
-        --origin_branch)  shift ; ORIGIN_BRANCH="$1" ; shift ;;
-        --working_branch) shift ; WORKING_BRANCH="$1" ; shift ;;
-        --upversion_data) shift ; UPVERSION_LOG="$1" ; shift ;;
-        -h|--help)        HELP=1 ; shift ;;
-        --)               shift ; break ;;
-        *)                usage; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   UPVERSION_LOG=$UPVERSION_DATA
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   echo "ERROR: please specify location of upversion data"
-   usage
-   exit 1
-fi
-
-if [ ! -f "$UPVERSION_LOG" ]; then
-   echo "File not found: '$UPVERSION_LOG'"
-   exit 1
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$PATCH_SOURCE_BRANCH
-   WORKING_BRANCH=$MY_PATCH_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$SOURCE_BRANCH
-   WORKING_BRANCH=$MY_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ]; then
-   echo "ERROR: please specify a origin branch"
-   usage
-   exit 1
-fi
-
-if [ "$WORKING_BRANCH" == "" ]; then
-   echo "ERROR: please specify a working branch"
-   usage
-   exit 1
-fi
-
-# One step back to see the old symlinks
-cd $MY_REPO
-
-FAILED=""
-for dat in $(cat $UPVERSION_LOG); do
-   name=$(echo $dat | awk -F '#' '{print $1}')
-   srpm_path=$(echo $dat | awk -F '#' '{print $2}')
-   old_src_rpm=$(echo $dat | awk -F '#' '{print $4}')
-   new_src_rpm=$(echo $dat | awk -F '#' '{print $5}')
-
-   (
-   cd $(dirname $srpm_path)
-   CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
-   if [ "$CURRENT_BRANCH" != "$WORKING_BRANCH" ]; then
-      git checkout $WORKING_BRANCH
-      if [ $? -ne 0 ]; then
-         git checkout $ORIGIN_BRANCH
-         if [ $? -ne 0 ]; then
-            echo "ERROR: Can't checkout branch '$ORIGIN_BRANCH' in directory '$(pwd)'"
-            exit 1
-         fi
-
-         git checkout -b $WORKING_BRANCH
-         if [ $? -ne 0 ]; then
-            echo "ERROR: failed to 'git checkout -b $WORKING_BRANCH' from '$(pwd)'"
-            exit 1
-         else
-            echo "created branch '$WORKING_BRANCH' at '$(pwd)'"
-         fi
-      fi
-   fi
-
-   sed -i "s#$old_src_rpm#$new_src_rpm#" $srpm_path
-   if [ $? -ne 0 ]; then
-      echo "ERROR: sed failed '$old_src_rpm' -> '$new_src_rpm'"
-      exit 1
-   else
-      echo "updated $srpm_path: '$old_src_rpm' -> '$new_src_rpm'"
-   fi
-
-   exit 0
-   )
-
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed while working on package '$name' at '$srpm_path'"
-      exit 1
-   fi
-done
-
-echo ""
-for d in $(for dat in $(cat $UPVERSION_LOG); do srpm_path=$(echo $dat | awk -F '#' '{print $2}'); ( cd $(dirname $srpm_path); git rev-parse --show-toplevel ); done | sort --unique); do
-   (
-    cd $d
-    echo "cd $d"
-    for f in $(git status --porcelain | grep 'srpm_path$' | awk '{print $2}'); do 
-        echo "git add $f";
-    done
-    echo "git commit -m 'srpm_path updates for patch $PATCH_ID'"
-   )
-done
-echo ""
diff --git a/build-tools/patch_rebase_3 b/build-tools/patch_rebase_3
deleted file mode 100755
index 026d50e8..00000000
--- a/build-tools/patch_rebase_3
+++ /dev/null
@@ -1,129 +0,0 @@
-#!/bin/bash
-
-#
-# Start an edit session for packages to be upgraded - post upgrade version
-#
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-usage () {
-    echo ""
-    echo "Step 3: Start an edit session for packages to be upgraded - post upgrade version"
-    echo ""
-    echo "Usage: "
-    echo "   patch_rebase_3 [--origin_branch <branch>] [--working_branch <branch>] [--upversion_data <file>]"
-    echo ""
-    echo "Assumes $(basename ${CENTOS_REPO}) already has a working_branch commit that sets the new symlinks."
-    echo ""
-    echo "The upversion_data file has data on all the src.rpm being updated in the format:"
-    echo "  export UPVERSION_DATA=$MY_WORKSPACE/upversion.log"
-    echo "  PKG=lighttpd"
-    echo "  OLD_SRC_RPM=lighttpd-1.4.41-1.el7.src.rpm"
-    echo "  NEW_SRC_RPM=lighttpd-1.4.41-2.el7.src.rpm"
-    echo "  SRPM_PATH=$MY_REPO/stx/integ/extended/lighttpd/centos/srpm_path"
-    echo "  echo \"\$PKG#\$SRPM_PATH##\$OLD_SRC_RPM#\$NEW_SRC_RPM\" > UPVERSION_DATA"
-    echo ""
-}
-
-
-TEMP=`getopt -o h --long origin_branch:,working_branch:,upversion_data:,help -n 'test.sh' -- "$@"`
-eval set -- "$TEMP"
-
-ORIGIN_BRANCH=""
-WORKING_BRANCH=""
-UPVERSION_LOG=""
-HELP=0
-
-while true ; do
-    case "$1" in
-        --origin_branch)  shift ; ORIGIN_BRANCH="$1" ; shift ;;
-        --working_branch) shift ; WORKING_BRANCH="$1" ; shift ;;
-        --upversion_data) shift ; UPVERSION_LOG="$1" ; shift ;;
-        -h|--help)        HELP=1 ; shift ;;
-        --)               shift ; break ;;
-        *)                usage; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   UPVERSION_LOG=$UPVERSION_DATA
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   echo "ERROR: please specify location of upversion data"
-   usage
-   exit 1
-fi
-
-if [ ! -f "$UPVERSION_LOG" ]; then
-   echo "File not found: '$UPVERSION_LOG'"
-   exit 1
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$PATCH_SOURCE_BRANCH
-   WORKING_BRANCH=$MY_PATCH_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$SOURCE_BRANCH
-   WORKING_BRANCH=$MY_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ]; then
-   echo "ERROR: please specify a origin branch"
-   usage
-   exit 1
-fi
-
-if [ "$WORKING_BRANCH" == "" ]; then
-   echo "ERROR: please specify a working branch"
-   usage
-   exit 1
-fi
-
-# One step back to see the old symlinks
-cd ${CENTOS_REPO}
-git checkout $WORKING_BRANCH
-if [ $? != 0 ]; then
-   echo "ERROR: Can't checkout branch '$WORKING_BRANCH' in directory '${CENTOS_REPO}'"
-   exit 1
-fi
-
-FAILED=""
-for dat in $(cat $UPVERSION_LOG); do
-   name=$(echo $dat | awk -F '#' '{print $1}')
-   srpm_path=$(echo $dat | awk -F '#' '{print $2}')
-   old_src_rpm=$(echo $dat | awk -F '#' '{print $4}')
-   new_src_rpm=$(echo $dat | awk -F '#' '{print $5}')
-
-   echo "$name  $old_src_rpm  $new_src_rpm"
-
-   build-pkgs --edit $name --no-meta-patch
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed cmd 'build-pkgs --edit $name'"
-      FAILED="$name $FAILED"
-      break
-   fi
-   echo "$?   <=<=<=<=<=<=<=<=<=<=<=<=<=<=<=<="
-done
-
-if [ "$FAILED" != "" ]; then
-   echo "Failed build-pkgs --edit for ... $FAILED"
-   exit 1
-fi
-
-
diff --git a/build-tools/patch_rebase_4 b/build-tools/patch_rebase_4
deleted file mode 100755
index ada6fa30..00000000
--- a/build-tools/patch_rebase_4
+++ /dev/null
@@ -1,413 +0,0 @@
-#!/bin/bash
-
-#
-# Migrate Titanium Cloud patches to the new package version
-#
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-usage () {
-    echo ""
-    echo "Step 4: Migrate Titanium Cloud patches to the new package version"
-    echo ""
-    echo "Usage: "
-    echo "   patch_rebase_4 [--origin_branch <branch>] [--working_branch <branch>] [--upversion_data <file>]"
-    echo ""
-    echo "Assumes $(basename ${CENTOS_REPO}) already has a working_branch commit that sets the new symlinks."
-    echo ""
-    echo "The upversion_data file has data on all the src.rpm being updated in the format:"
-    echo "  export UPVERSION_DATA=$MY_WORKSPACE/upversion.log"
-    echo "  PKG=lighttpd"
-    echo "  OLD_SRC_RPM=lighttpd-1.4.41-1.el7.src.rpm"
-    echo "  NEW_SRC_RPM=lighttpd-1.4.41-2.el7.src.rpm"
-    echo "  SRPM_PATH=$MY_REPO/stx/integ/extended/lighttpd/centos/srpm_path"
-    echo "  echo \"\$PKG#\$SRPM_PATH##\$OLD_SRC_RPM#\$NEW_SRC_RPM\" > UPVERSION_DATA"
-    echo ""
-}
-
-
-TEMP=`getopt -o h --long origin_branch:,working_branch:,upversion_data:,help -n 'test.sh' -- "$@"`
-eval set -- "$TEMP"
-
-ORIGIN_BRANCH=""
-WORKING_BRANCH=""
-UPVERSION_LOG=""
-HELP=0
-
-while true ; do
-    case "$1" in
-        --origin_branch)  shift ; ORIGIN_BRANCH="$1" ; shift ;;
-        --working_branch) shift ; WORKING_BRANCH="$1" ; shift ;;
-        --upversion_data) shift ; UPVERSION_LOG="$1" ; shift ;;
-        -h|--help)        HELP=1 ; shift ;;
-        --)               shift ; break ;;
-        *)                usage; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   UPVERSION_LOG=$UPVERSION_DATA
-fi
-
-if [ "$UPVERSION_LOG" == "" ]; then
-   echo "ERROR: please specify location of upversion data"
-   usage
-   exit 1
-fi
-
-if [ ! -f "$UPVERSION_LOG" ]; then
-   echo "File not found: '$UPVERSION_LOG'"
-   exit 1
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$PATCH_SOURCE_BRANCH
-   WORKING_BRANCH=$MY_PATCH_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ] && [ "$WORKING_BRANCH" == "" ]; then
-   ORIGIN_BRANCH=$SOURCE_BRANCH
-   WORKING_BRANCH=$MY_BRANCH
-fi
-
-if [ "$ORIGIN_BRANCH" == "" ]; then
-   echo "ERROR: please specify a origin branch"
-   usage
-   exit 1
-fi
-
-if [ "$WORKING_BRANCH" == "" ]; then
-   echo "ERROR: please specify a working branch"
-   usage
-   exit 1
-fi
-
-if [ "$DISPLAY" == "" ]; then
-   echo "ERROR: X-Windows 'DISPLAY' variable not set. This script needs to open pop-up windows."
-   usage
-   exit 1
-fi
-
-# One step back to see the old symlinks
-cd ${CENTOS_REPO}
-git checkout $WORKING_BRANCH
-if [ $? != 0 ]; then
-   echo "ERROR: Can't checkout branch '$WORKING_BRANCH' in directory '${CENTOS_REPO}'"
-   exit 1
-fi
-
-FAILED=""
-build_types="std rt"
-for dat in $(cat $UPVERSION_LOG); do
-   (
-   name=$(echo $dat | awk -F '#' '{print $1}')
-   srpm_path=$(echo $dat | awk -F '#' '{print $2}')
-   old_src_rpm=$(echo $dat | awk -F '#' '{print $4}')
-   new_src_rpm=$(echo $dat | awk -F '#' '{print $5}')
-
-   PKG_DIR=$(dirname $(dirname $srpm_path))
-   OLD_BRANCH=$(echo $old_src_rpm | sed 's#[.]src[.]rpm$##')
-   NEW_BRANCH=$(echo $new_src_rpm | sed 's#[.]src[.]rpm$##')
-   
-   WORK_META_DIR=""
-   for dd in $build_types; do
-      WORK_META_DIR=$MY_WORKSPACE/$dd/srpm_work/$name/rpmbuild
-      echo "WORK_META_DIR=$WORK_META_DIR"
-      if [ -d $WORK_META_DIR ]; then
-         break;
-      else
-         WORK_META_DIR=""
-      fi
-   done
-   if [ "$WORK_META_DIR" == "" ]; then
-      echo "ERROR: failed to find srpm_work directory for '$name'"
-      exit 1
-   fi
-
-   # WORK_SRC_DIR=$(dirname $(find $MY_WORKSPACE/srpm_work/$name/gits/ -type d -name .git))
-   NEW_WORK_SRC_DIR=""
-   OLD_WORK_SRC_DIR=""
-   for dd in $build_types; do
-      for g in $(find $MY_WORKSPACE/$dd/srpm_work/$name/gits/ -type d -name .git); do
-         d=$(dirname $g)
-         if [ -d $d ]; then
-            cd $d;
-            git tag | grep pre_wrs_ >> /dev/null
-            if [ $? -ne 0 ]; then
-               continue
-            fi
-            git checkout $OLD_BRANCH 2>> /dev/null
-            if [ $? -eq 0 ]; then
-               OLD_WORK_SRC_DIR=$d
-            fi
-            git checkout $NEW_BRANCH  2>> /dev/null
-            if [ $? -eq 0 ]; then
-               NEW_WORK_SRC_DIR=$d
-            fi
-         fi
-      done
-   done
-   if [ "$WORK_META_DIR" == "" ]; then
-      echo "ERROR: failed to find srpm_work directory for '$name'"
-      exit 1
-   fi
-
-   echo "$name  $old_src_rpm  $new_src_rpm"
-   echo "PKG_DIR=$PKG_DIR"
-   echo "OLD_BRANCH=$OLD_BRANCH"
-   echo "NEW_BRANCH=$NEW_BRANCH"
-   echo "WORK_META_DIR=$WORK_META_DIR"
-   echo "OLD_WORK_SRC_DIR=$OLD_WORK_SRC_DIR"
-   echo "NEW_WORK_SRC_DIR=$NEW_WORK_SRC_DIR"
-   echo ""
-
-   (
-   cd $WORK_META_DIR
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to cd to WORK_META_DIR=$WORK_META_DIR"
-      exit 1
-   fi
-   echo "--- old meta git log (oldest to newest) ---"
-   git checkout $OLD_BRANCH
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to git checkout OLD_BRANCH=$OLD_BRANCH"
-      exit 1
-   fi
-   git log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%ci) %C(bold blue)<%an>%Creset' --abbrev-commit | tac
-   PATCH_COMMIT_LIST=$(git log --graph --pretty=format:'%h -%d %s (%ci) <%an>' --abbrev-commit | tac | grep WRS: | grep -v 'WRS: COPY_LIST content' | awk '{ print $2 }')
-   echo "--- new meta git log (oldest to newest) ---"
-   git checkout $NEW_BRANCH
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to git checkout NEW_BRANCH=$NEW_BRANCH"
-      exit 1
-   fi
-   git log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%ci) %C(bold blue)<%an>%Creset' --abbrev-commit | tac
-   REFERENCE_COMMIT=$(git log --graph --pretty=format:'%h -%d %s (%ci) <%an>' --abbrev-commit | head -n 1 | awk '{ print $2 }')
-   echo ""
-
-   for COMMIT in ${PATCH_COMMIT_LIST}; do
-      echo "git cherry-pick $COMMIT"
-      git cherry-pick "$COMMIT"
-      if [ $? -ne 0 ]; then
-         echo "WARNING: 'git cherry-pick $COMMIT' found merge conflicts. Please fix these files" 
-         git status --porcelain | grep '^UU ' | awk '{ print $2}'
-         echo "pwd=$(pwd)"
-         # gitk &
-         echo "git mergetool --no-prompt"
-         git mergetool --no-prompt
-         # for FILE_NAME in $(git status --porcelain | grep '^UU ' | awk '{ print $2}'); do
-         #    xterm -e "vi $FILE_NAME -c '/[<=>][<=>][<=>][<=>]'"
-         #    if [ $? -ne 0 ]; then
-         #       echo "ERROR: problem launching editor on "
-         #       exit 1
-         #    fi
-         # done
-         echo "git cherry-pick --continue"
-         git cherry-pick --continue
-      fi
-   done
-
-   PATCH_LIST=$(git format-patch -n $REFERENCE_COMMIT)
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to git format-patch -n REFERENCE_COMMIT=$REFERENCE_COMMIT"
-      exit 1
-   fi
-   for PATCH_FILE in ${PATCH_LIST}; do
-      PATCH_TARGET=$(echo $PATCH_FILE | sed 's/^[0-9][0-9][0-9][0-9]-WRS-//' | sed 's/.patch$//')
-      echo "$PATCH_FILE -> $PATCH_TARGET"
-      N=$(find "$PKG_DIR/centos/meta_patches" -name "$PATCH_TARGET*" | wc -l)
-      if [ $N -eq 1 ]; then
-          PATCH_DEST=$(find "$PKG_DIR/centos/meta_patches" -name "$PATCH_TARGET*")
-          echo "cp -f $PATCH_FILE $PATCH_DEST"
-          \cp -f  $PATCH_FILE  $PATCH_DEST
-          if [ $? -ne 0 ]; then
-             echo "ERROR: copy failed $WORK_META_DIR/$PATCH_FILE -> $PATCH_DEST"
-             exit 1
-          fi
-      else
-          echo "ERROR: Don't know what destination file name to use for patch '$WORK_META_DIR/$PATCH_FILE' derived from commit $COMMIT, and to be copied to '$PKG_DIR/centos/meta_patches'"
-      fi
-   done
-
-   echo ""
-   echo ""
-   )
-
-   if [ $? -ne 0 ]; then
-      FAILED=$name
-      break
-   fi
-
-   (
-   echo "--- old git log (oldest to newest) ---"
-   cd $OLD_WORK_SRC_DIR
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to cd to OLD_WORK_SRC_DIR=$OLD_WORK_SRC_DIR"
-      exit 1
-   fi
-
-   git checkout $OLD_BRANCH
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to git checkout OLD_BRANCH=$OLD_BRANCH in directory '$OLD_WORK_SRC_DIR'"
-      exit 1
-   fi
-
-   git log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%ci) %C(bold blue)<%an>%Creset' --abbrev-commit | tac
-   PATCH_COMMIT_LIST=$(git log --graph --pretty=format:'%h -%d %s (%ci) <%an>' --abbrev-commit | tac | grep WRS: | grep -v 'WRS: COPY_LIST content' | awk '{ print $2 }')
-
-   echo "--- new git log (oldest to newest) ---"
-   cd $NEW_WORK_SRC_DIR
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to cd to NEW_WORK_SRC_DIR=$NEW_WORK_SRC_DIR"
-      exit 1
-   fi
-
-   git checkout $NEW_BRANCH
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to git checkout NEW_BRANCH=$NEW_BRANCH in directory '$NEW_WORK_SRC_DIR'"
-      exit 1
-   fi
-
-   git log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%ci) %C(bold blue)<%an>%Creset' --abbrev-commit | tac
-   REFERENCE_COMMIT=$(git log --graph --pretty=format:'%h -%d %s (%ci) <%an>' --abbrev-commit | head -n 1 | awk '{ print $2 }')
-   echo ""
-
-   if [ "$OLD_WORK_SRC_DIR" == "$NEW_WORK_SRC_DIR" ]; then
-      for COMMIT in ${PATCH_COMMIT_LIST}; do
-         echo "git cherry-pick $COMMIT"
-         git cherry-pick "$COMMIT"
-         if [ $? -ne 0 ]; then
-            echo "WARNING: 'git cherry-pick $COMMIT' found merge conflicts. Please fix these files" 
-            git status --porcelain | grep '^UU ' | awk '{ print $2}'
-            echo "pwd=$(pwd)"
-            # gitk &
-            echo "git mergetool --no-prompt"
-            git mergetool --no-prompt
-            # for FILE_NAME in $(git status --porcelain | grep '^UU ' | awk '{ print $2}'); do
-            #    xterm -e "vi $FILE_NAME -c '/[<=>][<=>][<=>][<=>]'"
-            #    if [ $? -ne 0 ]; then
-            #       echo "ERROR: problem launching editor on "
-            #       exit 1
-            #    fi
-            # done
-            echo "git cherry-pick --continue"
-            git cherry-pick --continue
-         fi
-      done
-   else
-      cd $OLD_WORK_SRC_DIR
-      PATCH_LIST=$(git format-patch -n pre_wrs_$OLD_BRANCH)
-      if [ $? -ne 0 ]; then
-         echo "ERROR: failed to git format-patch -n REFERENCE_COMMIT=pre_wrs_$OLD_BRANCH"
-         exit 1
-      fi
-      cd $NEW_WORK_SRC_DIR
-      for PATCH_FILE in ${PATCH_LIST}; do
-         cat $OLD_WORK_SRC_DIR/$PATCH_FILE | patch -p1
-         if [ $? -ne 0 ]; then
-            for REJECT in $(find . -name '*.rej'); do
-               FILE_NAME=$(echo $REJECT | sed 's#.rej$##')
-               cd $OLD_WORK_SRC_DIR
-               gitk $FILE_NAME &
-               cd $NEW_WORK_SRC_DIR
-               if [ -f $FILE_NAME ] && [ -f $FILE_NAME.orig ]; then
-                  \cp -f $FILE_NAME.orig $FILE_NAME
-                  xterm -e "vi $FILE_NAME $REJECT"
-                  rm -f $REJECT
-                  rm -f $FILE_NAME.orig
-               fi
-            done
-         fi
-
-         git add --all
-         MSG=$(echo $PATCH_FILE | sed 's/^[0-9][0-9][0-9][0-9]-WRS-//' | sed 's/.patch$//')
-         git commit -m "WRS: $MSG"
-      done
-      
-   fi
-
-   PATCH_LIST=$(git format-patch -n $REFERENCE_COMMIT)
-   if [ $? -ne 0 ]; then
-      echo "ERROR: failed to git format-patch -n REFERENCE_COMMIT=$REFERENCE_COMMIT"
-      exit 1
-   fi
-   for PATCH_FILE in ${PATCH_LIST}; do
-      PATCH_TARGET=$(echo $PATCH_FILE | sed 's/^[0-9][0-9][0-9][0-9]-WRS-Patch[0-9]*-//' | sed 's/^[0-9][0-9][0-9][0-9]-WRS-Patch//' | sed 's/.patch$//')
-      echo "$PATCH_FILE -> $PATCH_TARGET"
-      PKG_PATCH_DIR="$PKG_DIR/centos/patches"
-      N=0
-      if [ -d  "$PKG_PATCH_DIR" ]; then
-         N=$(find "$PKG_PATCH_DIR" -name "$PATCH_TARGET*" | grep -v '[/]meta_patches[/]' | wc -l)
-      fi
-      if [ $N -ne 1 ]; then
-         PKG_PATCH_DIR="$PKG_DIR"
-         if [ -d  "$PKG_PATCH_DIR" ]; then
-            N=$(find "$PKG_PATCH_DIR" -name "$PATCH_TARGET*" | grep -v '[/]meta_patches[/]' | wc -l)
-         fi
-      fi
-      echo "N=$N"
-      echo "PKG_PATCH_DIR=$PKG_PATCH_DIR"
-
-      if [ $N -eq 1 ]; then
-          PATCH_DEST=$(find "$PKG_PATCH_DIR" -name "$PATCH_TARGET*" | grep -v '[/]meta_patches[/]')
-          echo "meld $PATCH_FILE -> $PATCH_DEST"
-          meld  $PATCH_FILE  $PATCH_DEST
-          if [ $? -ne 0 ]; then
-             echo "ERROR: meld failed $WORK_SRC_DIR/$PATCH_FILE -> $PATCH_DEST"
-             exit 1
-          fi
-      else
-          echo "ERROR: Don't know what destination file name to use for patch '$OLD_WORK_SRC_DIR/$PATCH_FILE', and to be copied to '$PKG_PATCH_DIR'"
-      fi
-   done
-
-   echo ""
-   echo ""
-   )
-
-   if [ $? -ne 0 ]; then
-      FAILED=$name
-      break
-   fi
-
-   )
-
-
-done
-
-if [ "$FAILED" != "" ]; then
-   echo "Failed for ... $FAILED"
-   exit 1
-fi
-
-echo ""
-for d in $(for dat in $(cat $UPVERSION_LOG); do srpm_path=$(echo $dat | awk -F '#' '{print $2}'); ( cd $(dirname $srpm_path); git rev-parse --show-toplevel ); done | sort --unique); do
-   (
-    cd $d
-    echo "cd $d"
-    for f in $(git status --porcelain | awk '{print $2}'); do 
-        echo "git add $f"; 
-    done
-    if [ "$PATCH_ID" == "" ]; then
-       echo "git commit -m 'rebased patches'"
-    else
-       echo "git commit -m 'rebased patches for patch $PATCH_ID'"
-    fi
-   )
-done
-echo ""
-
-
diff --git a/build-tools/repo_files/comps.xml b/build-tools/repo_files/comps.xml
deleted file mode 100644
index 91e4f8c9..00000000
--- a/build-tools/repo_files/comps.xml
+++ /dev/null
@@ -1,39 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE comps PUBLIC "-//Red Hat, Inc.//DTD Comps info//EN" "comps.dtd">
-<comps>
-  <group>
-    <id>buildsys-build</id>
-    <name>Buildsystem building group</name>
-    <description/>
-    <default>false</default>
-    <uservisible>false</uservisible>
-    <packagelist>
-      <packagereq type="mandatory">bash</packagereq>
-      <packagereq type="mandatory">bzip2</packagereq>
-      <packagereq type="mandatory">coreutils</packagereq>
-      <packagereq type="mandatory">cpio</packagereq>
-      <packagereq type="mandatory">diffutils</packagereq>
-      <packagereq type="mandatory">epel-release</packagereq>
-      <packagereq type="mandatory">epel-rpm-macros</packagereq>
-      <packagereq type="mandatory">findutils</packagereq>
-      <packagereq type="mandatory">gawk</packagereq>
-      <packagereq type="mandatory">gcc</packagereq>
-      <packagereq type="mandatory">gcc-c++</packagereq>
-      <packagereq type="mandatory">grep</packagereq>
-      <packagereq type="mandatory">gzip</packagereq>
-      <packagereq type="mandatory">hostname</packagereq>
-      <packagereq type="mandatory">info</packagereq>
-      <packagereq type="mandatory">make</packagereq>
-      <packagereq type="mandatory">patch</packagereq>
-      <packagereq type="mandatory">redhat-rpm-config</packagereq>
-      <packagereq type="mandatory">rpm-build</packagereq>
-      <packagereq type="mandatory">sed</packagereq>
-      <packagereq type="mandatory">shadow-utils</packagereq>
-      <packagereq type="mandatory">tar</packagereq>
-      <packagereq type="mandatory">unzip</packagereq>
-      <packagereq type="mandatory">util-linux-ng</packagereq>
-      <packagereq type="mandatory">which</packagereq>
-      <packagereq type="mandatory">xz</packagereq>
-    </packagelist>
-  </group>
-</comps>
diff --git a/build-tools/repo_files/mock.cfg.all.proto b/build-tools/repo_files/mock.cfg.all.proto
deleted file mode 120000
index 2ba14cf5..00000000
--- a/build-tools/repo_files/mock.cfg.all.proto
+++ /dev/null
@@ -1 +0,0 @@
-mock.cfg.centos7.all.proto
\ No newline at end of file
diff --git a/build-tools/repo_files/mock.cfg.centos7.all.proto b/build-tools/repo_files/mock.cfg.centos7.all.proto
deleted file mode 100644
index 95ed980c..00000000
--- a/build-tools/repo_files/mock.cfg.centos7.all.proto
+++ /dev/null
@@ -1,62 +0,0 @@
-config_opts['root'] = 'BUILD_ENV/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
-config_opts['dist'] = 'el7'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '7'
-config_opts['package_manager'] = 'yum'
-config_opts['yum_builddep_command'] = 'MY_REPO_DIR/build-tools/yum-builddep-wrapper'
-config_opts['use_bootstrap'] = False
-config_opts['use_bootstrap_image'] = False
-config_opts['rpmbuild_networking'] = False
-
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=LOCAL_BASE/MY_BUILD_DIR/std/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-rt]
-name=local-rt
-baseurl=LOCAL_BASE/MY_BUILD_DIR/rt/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-installer]
-name=local-installer
-baseurl=LOCAL_BASE/MY_BUILD_DIR/installer/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[StxCentos7Distro]
-name=Stx-Centos-7-Distro
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/Binary
-failovermethod=priority
-exclude=kernel-devel libvirt-devel
-
-[StxCentos7Distro-rt]
-name=Stx-Centos-7-Distro-rt
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/rt/Binary
-failovermethod=priority
-
-"""
diff --git a/build-tools/repo_files/mock.cfg.centos7.distro.proto b/build-tools/repo_files/mock.cfg.centos7.distro.proto
deleted file mode 100644
index 95ed980c..00000000
--- a/build-tools/repo_files/mock.cfg.centos7.distro.proto
+++ /dev/null
@@ -1,62 +0,0 @@
-config_opts['root'] = 'BUILD_ENV/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
-config_opts['dist'] = 'el7'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '7'
-config_opts['package_manager'] = 'yum'
-config_opts['yum_builddep_command'] = 'MY_REPO_DIR/build-tools/yum-builddep-wrapper'
-config_opts['use_bootstrap'] = False
-config_opts['use_bootstrap_image'] = False
-config_opts['rpmbuild_networking'] = False
-
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=LOCAL_BASE/MY_BUILD_DIR/std/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-rt]
-name=local-rt
-baseurl=LOCAL_BASE/MY_BUILD_DIR/rt/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-installer]
-name=local-installer
-baseurl=LOCAL_BASE/MY_BUILD_DIR/installer/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[StxCentos7Distro]
-name=Stx-Centos-7-Distro
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/Binary
-failovermethod=priority
-exclude=kernel-devel libvirt-devel
-
-[StxCentos7Distro-rt]
-name=Stx-Centos-7-Distro-rt
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/rt/Binary
-failovermethod=priority
-
-"""
diff --git a/build-tools/repo_files/mock.cfg.centos7.proto b/build-tools/repo_files/mock.cfg.centos7.proto
deleted file mode 100644
index 2ca56fdc..00000000
--- a/build-tools/repo_files/mock.cfg.centos7.proto
+++ /dev/null
@@ -1,61 +0,0 @@
-config_opts['root'] = 'BUILD_ENV/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install @buildsys-build'
-config_opts['dist'] = 'el7'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '7'
-config_opts['package_manager'] = 'yum'
-config_opts['yum_builddep_command'] = 'MY_REPO_DIR/build-tools/yum-builddep-wrapper'
-config_opts['use_bootstrap'] = False
-config_opts['use_bootstrap_image'] = False
-config_opts['rpmbuild_networking'] = False
-
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=LOCAL_BASE/MY_BUILD_DIR/std/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-rt]
-name=local-rt
-baseurl=LOCAL_BASE/MY_BUILD_DIR/rt/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-installer]
-name=local-installer
-baseurl=LOCAL_BASE/MY_BUILD_DIR/installer/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[StxCentos7Distro]
-name=Stx-Centos-7-Distro
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/Binary
-failovermethod=priority
-
-[StxCentos7Distro-rt]
-name=Stx-Centos-7-Distro-rt
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/rt/Binary
-failovermethod=priority
-
-"""
diff --git a/build-tools/repo_files/mock.cfg.centos8.all.proto b/build-tools/repo_files/mock.cfg.centos8.all.proto
deleted file mode 100644
index c5bb65da..00000000
--- a/build-tools/repo_files/mock.cfg.centos8.all.proto
+++ /dev/null
@@ -1,63 +0,0 @@
-config_opts['root'] = 'BUILD_ENV/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils epel-release epel-rpm-macros fedpkg-minimal findutils gawk gcc gcc-c++ grep gzip info make patch redhat-rpm-config redhat-release rpm-build sed shadow-utils tar unzip util-linux which xz'
-config_opts['dist'] = 'el8'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '8'
-config_opts['package_manager'] = 'dnf'
-config_opts['use_bootstrap'] = False
-config_opts['use_bootstrap_image'] = False
-config_opts['rpmbuild_networking'] = False
-
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=LOCAL_BASE/MY_BUILD_DIR/std/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-rt]
-name=local-rt
-baseurl=LOCAL_BASE/MY_BUILD_DIR/rt/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-installer]
-name=local-installer
-baseurl=LOCAL_BASE/MY_BUILD_DIR/installer/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[StxCentos8Distro]
-name=Stx-Centos-8-Distro
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/Binary
-failovermethod=priority
-exclude=kernel-devel libvirt-devel
-module_hotfixes=1
-
-[StxCentos8Distro-rt]
-name=Stx-Centos-8-Distro-rt
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/rt/Binary
-failovermethod=priority
-module_hotfixes=1
-
-"""
diff --git a/build-tools/repo_files/mock.cfg.centos8.distro.proto b/build-tools/repo_files/mock.cfg.centos8.distro.proto
deleted file mode 100644
index c5bb65da..00000000
--- a/build-tools/repo_files/mock.cfg.centos8.distro.proto
+++ /dev/null
@@ -1,63 +0,0 @@
-config_opts['root'] = 'BUILD_ENV/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils epel-release epel-rpm-macros fedpkg-minimal findutils gawk gcc gcc-c++ grep gzip info make patch redhat-rpm-config redhat-release rpm-build sed shadow-utils tar unzip util-linux which xz'
-config_opts['dist'] = 'el8'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '8'
-config_opts['package_manager'] = 'dnf'
-config_opts['use_bootstrap'] = False
-config_opts['use_bootstrap_image'] = False
-config_opts['rpmbuild_networking'] = False
-
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=LOCAL_BASE/MY_BUILD_DIR/std/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-rt]
-name=local-rt
-baseurl=LOCAL_BASE/MY_BUILD_DIR/rt/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-installer]
-name=local-installer
-baseurl=LOCAL_BASE/MY_BUILD_DIR/installer/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[StxCentos8Distro]
-name=Stx-Centos-8-Distro
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/Binary
-failovermethod=priority
-exclude=kernel-devel libvirt-devel
-module_hotfixes=1
-
-[StxCentos8Distro-rt]
-name=Stx-Centos-8-Distro-rt
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/rt/Binary
-failovermethod=priority
-module_hotfixes=1
-
-"""
diff --git a/build-tools/repo_files/mock.cfg.centos8.proto b/build-tools/repo_files/mock.cfg.centos8.proto
deleted file mode 100644
index 08a041b2..00000000
--- a/build-tools/repo_files/mock.cfg.centos8.proto
+++ /dev/null
@@ -1,62 +0,0 @@
-config_opts['root'] = 'BUILD_ENV/mock'
-config_opts['target_arch'] = 'x86_64'
-config_opts['legal_host_arches'] = ('x86_64',)
-config_opts['chroot_setup_cmd'] = 'install bash bzip2 coreutils cpio diffutils epel-release epel-rpm-macros fedpkg-minimal findutils gawk gcc gcc-c++ grep gzip info make patch redhat-rpm-config redhat-release rpm-build sed shadow-utils tar unzip util-linux which xz'
-config_opts['dist'] = 'el8'  # only useful for --resultdir variable subst
-config_opts['releasever'] = '8'
-config_opts['package_manager'] = 'dnf'
-config_opts['use_bootstrap'] = False
-config_opts['use_bootstrap_image'] = False
-config_opts['rpmbuild_networking'] = False
-
-
-config_opts['yum.conf'] = """
-[main]
-keepcache=1
-debuglevel=2
-reposdir=/dev/null
-logfile=/var/log/yum.log
-retries=20
-obsoletes=1
-gpgcheck=0
-assumeyes=1
-syslog_ident=mock
-syslog_device=
-
-# repos
-[local-std]
-name=local-std
-baseurl=LOCAL_BASE/MY_BUILD_DIR/std/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-rt]
-name=local-rt
-baseurl=LOCAL_BASE/MY_BUILD_DIR/rt/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[local-installer]
-name=local-installer
-baseurl=LOCAL_BASE/MY_BUILD_DIR/installer/rpmbuild/RPMS
-enabled=1
-skip_if_unavailable=1
-metadata_expire=0
-
-[StxCentos8Distro]
-name=Stx-Centos-8-Distro
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/Binary
-failovermethod=priority
-module_hotfixes=1
-
-[StxCentos8Distro-rt]
-name=Stx-Centos-8-Distro-rt
-enabled=1
-baseurl=LOCAL_BASE/MY_REPO_DIR/centos-repo/rt/Binary
-failovermethod=priority
-module_hotfixes=1
-
-"""
diff --git a/build-tools/repo_files/mock.cfg.distro.proto b/build-tools/repo_files/mock.cfg.distro.proto
deleted file mode 120000
index add71c08..00000000
--- a/build-tools/repo_files/mock.cfg.distro.proto
+++ /dev/null
@@ -1 +0,0 @@
-mock.cfg.centos7.distro.proto
\ No newline at end of file
diff --git a/build-tools/repo_files/mock.cfg.proto b/build-tools/repo_files/mock.cfg.proto
deleted file mode 120000
index 55c2e026..00000000
--- a/build-tools/repo_files/mock.cfg.proto
+++ /dev/null
@@ -1 +0,0 @@
-mock.cfg.centos7.proto
\ No newline at end of file
diff --git a/build-tools/sign-rpms b/build-tools/sign-rpms
deleted file mode 100755
index d57699e4..00000000
--- a/build-tools/sign-rpms
+++ /dev/null
@@ -1,293 +0,0 @@
-#!/bin/bash
-
-# Add file signature to RPMs
-#
-# This script will add file signature to rpms in a given directory.
-# The directory containing the RPMs must be passed as a parameter. There is no default location.
-#
-#
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "   sign-rpms -d|--pkg-dir <directory>"
-    echo "   -d --pkg-dir <directory> directory contain the RPMs to sign"
-    echo "   -h|--help this message"
-    echo ""
-}
-
-# number of processors. The process will use all available processors by default.
-NPROCS=$(nproc)
-
-export MOCK=/usr/bin/mock
-
-# check input variables
-function check_vars {
-    # need access to repo, which should normally be defined as MY_REPO in the env
-
-    if [ ! -z "$MY_REPO" ] && [ -d "$MY_REPO" ] ; then
-        INTERNAL_REPO_ROOT=$MY_REPO
-    fi
-
-    if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-        printf "  unable to use \$MY_REPO (value \"$MY_REPO\")\n"
-        printf "  -- checking \$MY_REPO_ROOT_DIR (value \"$MY_REPO_ROOT_DIR\")\n"
-        if [ ! -z "$MY_REPO_ROOT_DIR" ] && [ -d "$MY_REPO_ROOT_DIR/cgcs-root" ] ; then
-            INTERNAL_REPO_ROOT=$MY_REPO_ROOT_DIR/cgcs-root
-            printf "  Found!\n"
-        fi
-    fi
-
-    if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-        printf "  No joy -- checking for \$MY_WORKSPACE/cgcs-root\n"
-        if [ -d "$MY_WORKSPACE/cgcs-root" ] ; then
-            INTERNAL_REPO_ROOT=$MY_WORKSPACE/cgcs-root
-            printf "  Found!\n"
-        fi
-    fi
-
-    if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-        printf "  Error -- could not locate cgcs-root repo.\n"
-        exit 1
-    fi
-
-    if [ -z "$MY_BUILD_ENVIRONMENT" ] ; then
-        printf "  Error -- missing environment variable MY_BUILD_ENVIRONMENT"
-        exit 1
-    fi
-
-    if [ -z "$MY_BUILD_DIR" ] ; then
-        printf "  Error -- missing environment variable MY_BUILD_DIR"
-        exit 1
-    fi
-
-}
-
-#
-# this function will add IMA file signatures to all rpms in the Packages directory
-#
-# the process will copy the signing key and a makefile in the mock env under /tmp
-# it will also mount the Packages directory under /mnt/Packages
-# then mock will be invoked to sign the packages
-#
-# This process is using mock because the build servers do not have the same rpm / rpmsign version
-#
-
-function _local_cleanup {
-
-    printf "Cleaning mock environment\n"
-    $MOCK -q -r $_MOCK_CFG --scrub=all
-
-}
-
-function __local_trapdoor {
-    printf "caught signal while attempting to sign files. Cleaning up."
-    _local_cleanup
-
-    exit 1
-}
-
-
-function sign_packages {
-    OLD_PWD=$PWD
-
-    _MOCK_PKG_DIR=/mnt/Packages
-    _IMA_PRIV_KEY=ima_signing_key.priv
-    _KEY_DIR=$MY_REPO/build-tools/signing
-    _MOCK_KEY_DIR=/mnt/keys
-    _SIGN_MAKEFILE=_sign_pkgs.mk
-    _MK_DIR=$MY_REPO/build-tools/mk
-    _MOCK_MK_DIR=/mnt/mk
-
-    # mock confgiuration file
-    _MOCK_CFG=$MY_BUILD_DIR/${MY_BUILD_ENVIRONMENT}-sign.cfg
-
-    # recreate configuration file 
-    if [ -f $_MOCK_CFG ]; then
-        rm $_MOCK_CFG
-    fi
-    export BUILD_TYPE=std
-    export MY_BUILD_DIR_TOP=$MY_BUILD_DIR
-    modify-build-cfg $_MOCK_CFG
-    #  and customize
-    echo "config_opts['chroot_setup_cmd'] = 'install shadow-utils make rpm-sign'" >> $_MOCK_CFG
-    echo "config_opts['root'] = 'mock-sign'" >> $_MOCK_CFG
-    echo "config_opts['basedir'] = '${MY_WORKSPACE}'" >> $_MOCK_CFG
-    echo "config_opts['cache_topdir'] = '${MY_WORKSPACE}/mock-cache'" >> $_MOCK_CFG
-
-    echo "Signing packages in $_PKG_DIR with $NPROCS threads"
-    echo "using development key $_KEY_DIR/$_IMA_PRIV_KEY"
-
-    printf "Initializing mock environment\n"
-
-    trap __local_trapdoor SIGHUP SIGINT SIGABRT SIGTERM
-
-    # invoke make in mock to sign packages.
-    # this call will also create and initialize the mock env
-    eval $MOCK -q -r $_MOCK_CFG \'--plugin-option=bind_mount:dirs=[\(\"$_PKG_DIR\", \"$_MOCK_PKG_DIR\"\),\(\"$_MK_DIR\",\"$_MOCK_MK_DIR\"\),\(\"$_KEY_DIR\",\"$_MOCK_KEY_DIR\"\)]\' --shell \"cd $_MOCK_PKG_DIR\; make -j $NPROCS -f $_MOCK_MK_DIR/$_SIGN_MAKEFILE KEY=$_MOCK_KEY_DIR/$_IMA_PRIV_KEY\"
-
-    retval=$?
-
-    trap - SIGHUP SIGINT SIGABRT SIGTERM
-
-    _local_cleanup
-
-    if [ $retval -ne 0 ] ; then
-        echo "failed to add file signatures to RPMs in mock environment."
-        return $retval
-    fi
-
-    cd $OLD_PWD
-
-}
-
-function _copy_and_sign {
-
-    # upload rpms to server
-    scp $_PKG_DIR/*.rpm $SIGNING_USER@$SIGNING_SERVER:$_UPLOAD_DIR
-    retval=$?
-    if [ $retval -ne 0 ] ; then
-        echo "ERROR: failed to copy RPM files to signing server."
-        return $retval
-    fi
-
-    # get server to sign packages.
-    ssh $SIGNING_USER@$SIGNING_SERVER -- sudo $SIGNING_SERVER_SCRIPT -s -d $sub
-    retval=$?
-    if [ $retval -ne 0 ] ; then
-        echo "ERROR: failed to sign RPM files."
-        return $retval
-    fi
-
-    # download results back. This overwrites the original files.
-    scp $SIGNING_USER@$SIGNING_SERVER:$_UPLOAD_DIR/*.rpm $_PKG_DIR
-    retval=$?
-    if [ $retval -ne 0 ] ; then
-        echo "ERROR: failed to copy signed RPM files back from signing server."
-        return $retval
-    fi
-
-    return $retval
-
-}
-
-
-function _server_cleanup {
-
-    # cleanup
-    ssh $SIGNING_USER@$SIGNING_SERVER rm $_UPLOAD_DIR/*.rpm
-    if [ $? -ne 0 ] ; then
-        echo "Warning : failed to remove rpms from temporary upload directory ${SIGNING_SERVER}:${_UPLOAD_DIR}."
-    fi
-    ssh $SIGNING_USER@$SIGNING_SERVER rmdir $_UPLOAD_DIR
-    if [ $? -ne 0 ] ; then
-        echo "Warning : failed to remove temporary upload directory ${SIGNING_SERVER}:${_UPLOAD_DIR}."
-    fi
-
-}
-
-function __server_trapdoor {
-
-    printf "caught signal while attempting to sign files. Cleaning up."
-    _server_cleanup
-
-    exit 1
-}
-
-
-function sign_packages_on_server {
-
-    retval=0
-
-    # obtain temporary diretory to upload RPMs on signing server
-    _UPLOAD_DIR=`ssh $SIGNING_USER@$SIGNING_SERVER -- sudo $SIGNING_SERVER_SCRIPT -r`
-
-    retval=$?
-    if [ $retval -ne 0 ] ; then
-        echo "failed to obtain upload directory from signing server."
-        return $retval
-    fi
-
-    # extract base chroot dir and rpm dir within chroot
-    read base com sub <<< $_UPLOAD_DIR
-
-    # this is the upload temp dir, outside of chroot env
-    _UPLOAD_DIR=$base$sub
-
-    trap __server_trapdoor SIGHUP SIGINT SIGABRT SIGTERM
-
-    _copy_and_sign
-    retval=$?
-
-    trap - SIGHUP SIGINT SIGABRT SIGTERM
-
-    _server_cleanup
-
-    return $retval
-}
-
-
-
-#############################################
-# Main code
-#############################################
-
-# Check args
-HELP=0
-
-# return value
-retval=0
-
-# read the options
-TEMP=`getopt -o hd: --long help,pkg-dir: -n 'test.sh' -- "$@"`
-if [ $? -ne 0 ] ; then
-    echo "Invalid parameters - exiting"
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        -h|--help) HELP=1 ; shift ;;
-        -d|--pkg-dir) _PKG_DIR="$2"; shift; shift ;;
-        --) shift ; break ;;
-        *) echo "Internal error : unexpected parameter $2" ; exit 1 ;;
-    esac
-done
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-# package directory must be defined
-if [ -z "$_PKG_DIR" ]; then
-    echo "Need package directory. Use -d/--pkg-dir option"
-    usage
-    exit 1
-fi
-
-# ... and must exist
-if [ ! -d "$_PKG_DIR" ]; then
-    echo "Package directory $_PKG_DIR does not exist"
-    exit 1
-fi
-
-# Init variables
-check_vars
-
-echo signing $_PKG_DIR
-
-# sign all rpms
-if [ "$USER" == "jenkins" ] && [ ! -z "${SIGNING_USER}" ] && [ ! -z "${SIGNING_SERVER}" ] && [ ! -z "${SIGNING_SERVER_SCRIPT}" ]; then
-    sign_packages_on_server
-    retval=$?
-else
-    sign_packages
-    retval=$?
-fi
-
-exit $retval
-
diff --git a/build-tools/sign-secure-boot b/build-tools/sign-secure-boot
deleted file mode 100755
index c9f2cc0c..00000000
--- a/build-tools/sign-secure-boot
+++ /dev/null
@@ -1,538 +0,0 @@
-#!/bin/bash
-
-#
-# Copyright (c) 2018 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-# This script calls into an external signing server to perform signing of some
-# packages in the system.  The old packages (which are typically generated by
-# the build system and signed by placeholder keys) are overwritten by the new
-# packages.
-#
-# Three types of packages are signed:
-# kernels (both std and lowlatency, aka "rt", kernels)
-# grub
-# shim
-#
-# Kernels and grub are generated by producing (under the normal build system)
-# two packages -- a package containing the unsigned binaries, and a package
-# containing binaries signed with temporary keys.  All the "accessories" (files,
-# scripts, etc) are included in the package containing the signed-with-temp-keys
-# files.  The signing server will take both packages, sign the unsigned
-# binaries, and replace the files in the signed package with the newly signed
-# ones.
-#
-# Typical flow/artifacts
-# kernel.src.rpm      -> produces kernel.rpm and kernel-unsigned.rpm
-# kernel.rpm          -> initially contains binaries signed with a temporary key
-#                     -> contains all files used by the kernel
-#                     -> can be installed and used in a system (it just won't
-#                        secure boot since the key is just a temp key)
-# kernel-unsigned.rpm -> contains just unsigned kernel binaries
-#
-# The signing server will take both packages, sign the binaries in
-# kernel-unsigned.rpm with our private key, and replace the binaries in
-# kernel.rpm with the new binaries.  The kernel.rpm can then be replaced by the
-# version generated by the signing server.
-#
-# Shim is a bit of a different beast.
-#
-# There are two source packages - shim and shim-signed.  Frustratingly, "shim"
-# source produces a "shim-unsigned" binary output.  "shim-signed" produces a
-# "shim" binary output. 
-#
-# The "shim-signed" source RPM doesn't contain source code -- it just contains
-# instructions to take the "shim-unsigned" binaries, sign them, and package the
-# output.  We've modified the shim-signed RPM to (rather than sign with a temp
-# key) use "presigned" binaries from shim-unsigned if the files exist.  (It will
-# still use a temp key of no presigned files are found, which is how the build
-# system normally runs).
-#
-# The signing server will unpack the shim-unsigned package, sign the binaries
-# (as "presigned") and repack the package.
-#
-# A rebuild of shim-signed by the build server is then required.  
-#
-# Thanks for bearing with me in the convoluted discussion, above.
-
-
-# Script flow:
-# - call signing server to sign kernels (if they exist and are new, as with
-#   other RPMs)
-# - replace old kernel packages with newly signed ones
-# - call signing server to sign grub (and replace old version with the newly
-#   signed one)
-# - call signing server to sign shim-unsigned (replace old version)
-# - rebuild shim-signed 
-# - update our repos to advertize all newly replaced packages
-
-# check_if_pkg_needs_signing <path/to/filename.rpm>
-#
-# Checks to see if a given package needs to be signed.  We maintain a list of
-# MD5 sums for RPMs we have signed.  Thus, we can easily see if we've already
-# signed a package.
-#
-# Returns 1 if the package does need signing, or 0 if package does not
-#
-# This function expects the package specified to exist.
-function check_if_pkg_needs_signing
-{
-    local PKG_TO_CHECK=$1
-
-    if [ ! -e ${SIGNED_PKG_DB} ]; then
-        # We haven't signed anything before, so this package needs signing
-        return 1
-    fi
-
-    local SIGNED_PKG_MD5=`grep ${PKG_TO_CHECK} ${SIGNED_PKG_DB} | cut -d ' ' -f 1`
-    if [ "x${SIGNED_PKG_MD5}" == "x" ]; then
-        # We have no record of having signed the package -- needs signing
-        return 1
-    fi
-
-    local CURRENT_MD5=`md5sum ${PKG_TO_CHECK} | cut -d ' ' -f 1`
-    if [ "${CURRENT_MD5}" != "${SIGNED_PKG_MD5}" ]; then
-        # The package has been regenerated since we last signed it -- needs
-        # signing again
-        return 1
-    fi
-
-    # The package md5 sum matches the md5sum of the package when it was last
-    # signed.
-    return 0
-}
-
-# update_signed_pkg_list <path/to/filename.rpm>
-#
-# Updated our list of signed packages with the md5 sum of a recently signed
-# package.
-#
-# This function expects the package to exist.
-function update_signed_pkg_list
-{
-    local PKG_TO_ADD=$1
-
-    if [ ! -e ${SIGNED_PKG_DB} ]; then
-        touch ${SIGNED_PKG_DB}
-    fi
-
-    # remove current entry for package
-    local TMPFILE=`mktemp`
-    grep -v $(basename ${PKG_TO_ADD}) ${SIGNED_PKG_DB} > ${TMPFILE}
-    mv ${TMPFILE} ${SIGNED_PKG_DB}
-
-    # add MD5 for package to the package list
-    md5sum ${PKG_TO_ADD} >> ${SIGNED_PKG_DB}
-}
-
-
-# update_repo <std|rt>
-#
-# Updates either the standard or rt repo with latest packages
-# Checks that you specified a repo, and that the path exists.
-#
-# There are actually now two places we need to update -- the
-# rpmbuild/RPMS/ path, as well as the results/.../ path
-function update_repo
-{
-	local BUILD_TYPE=$1
-	local EXTRA_PARAMS=""
-	local RETCODE=0
-	local repopath=""
-
-	if [ "x$BUILD_TYPE" == "x" ]; then
-		return 1
-	fi
-
-	if [ "x$MY_BUILD_ENVIRONMENT_TOP" == "x" ]; then
-		return 1
-	fi
-
-	for repopath in "$MY_WORKSPACE/$BUILD_TYPE/rpmbuild/RPMS" "$MY_WORKSPACE/$BUILD_TYPE/results/${MY_BUILD_ENVIRONMENT_TOP}-$BUILD_TYPE"; do
-		if [ ! -d "$repopath" ]; then
-			echo "Error - cannot find path $repopath"
-			return 1
-		fi
-
-		cd $repopath
-		if [ -f comps.xml ]; then
-			EXTRA_PARAMS="-g comps.xml"
-		fi
-		createrepo --update $EXTRA_PARAMS . > /dev/null
-		RETCODE=$?
-		cd - > /dev/null
-		if [ 0$RETCODE -ne 0 ]; then
-			return $RETCODE
-		fi
-	done
-
-	return $RETCODE
-}
-
-# sign_shims - find and sign any shim package that we need
-#              Note that shim might produce a "shim-unsigned-[verison-release]
-#              package (old shim) or shim-unsigned-x64-[v-r] &
-#              shim-unsigned-ia32 package (new shim).  In the case of new shim,
-#              we must do x64 only, and not ia32.
-#
-function sign_shims
-{
-	SHIM=`find $MY_WORKSPACE/std/rpmbuild/RPMS -name "shim-unsigned-x64-*.$ARCH.rpm" | grep -v debuginfo`
-	if [ -z "$SHIM" ]; then
-		SHIM=`find $MY_WORKSPACE/std/rpmbuild/RPMS -name "shim-unsigned-*.$ARCH.rpm" | grep -v debuginfo`
-	fi
-	if [ -z "${SHIM}" ]; then
-	    echo "Warning -- cannot find shim package to sign"
-	    return 0
-	fi
-	sign shim $SHIM
-
-	return $?
-}
-
-# sign_grubs - find and sign any grub package that we need to.
-#              Grub (and kernel) are initially signed with temporary keys, so
-#              we need to upload both the complete package, as well as the
-#              unsigned binaries
-#
-function sign_grubs
-{
-	GRUB=`find $MY_WORKSPACE/std/rpmbuild/RPMS -name "grub2-efi-x64-[1-9]*.$ARCH.rpm"`
-	UNSIGNED_GRUB=`find $MY_WORKSPACE/std/rpmbuild/RPMS -name "grub2-efi-x64-unsigned*.$ARCH.rpm"`
-	if [ "x${GRUB}" == "x" ]; then
-	    echo "Warning -- cannot find GRUB package to sign"
-	    return 0
-	fi
-	if [ "x${UNSIGNED_GRUB}" == "x" ]; then
-	    echo "Warning -- cannot find unsigned GRUB package to sign"
-	    return 0
-	fi
-
-	sign grub2 $GRUB $UNSIGNED_GRUB
-	return $?
-}
-
-# sign_kernels - find and sign any kernel package that we need to.
-#
-function sign_kernels
-{
-    sign_kernel "std" ""
-    sign_kernel "rt" "-rt"
-}
-
-# sign_kernel - find and sign kernel package if we need to.
-#              Kernels (and grub) are initially signed with temporary keys, so
-#              we need to upload both the complete package, as well as the
-#              unsigned binaries
-function sign_kernel
-{
-    local KERNEL_PATH=$1
-    local KERNEL_EXTRA=$2
-
-    local KERNEL=""
-    local UNSIGNED_KERNEL=""
-    local RPM=""
-    local VMLINUZ=""
-
-    # 5.xx series kernels store vmlinuz in the 'kernel-core' package
-    KERNEL=$(find $MY_WORKSPACE/${KERNEL_PATH}/rpmbuild/RPMS -name "kernel${KERNEL_EXTRA}-core-[1-9]*.$ARCH.rpm")
-    if [ "x${KERNEL}" == "x" ]; then
-        # Older kernels store vmlinuz in the 'kernel' package
-        KERNEL=$(find $MY_WORKSPACE/${KERNEL_PATH}/rpmbuild/RPMS -name "kernel${KERNEL_EXTRA}-[1-9]*.$ARCH.rpm")
-        if [ "x${KERNEL}" == "x" ]; then
-            echo "Warning -- cannot find kernel package to sign in ${KERNEL_PATH}"
-            return 0
-        fi
-    fi
-
-    # The unsigned vmlinuz is in the 'kernel-unsigned' package for ALL kernels.
-    UNSIGNED_KERNEL=$(find $MY_WORKSPACE/${KERNEL_PATH}/rpmbuild/RPMS -name "kernel${KERNEL_EXTRA}-unsigned-[1-9]*.$ARCH.rpm")
-    if [ "x${UNSIGNED_KERNEL}" == "x" ]; then
-        echo "Warning -- cannot find unsigned kernel package to sign in ${KERNEL_PATH}"
-        return 0
-    fi
-
-    # check for vmlinuz
-    for RPM in $KERNEL $UNSIGNED_KERNEL; do
-        VMLINUZ=$(rpm -q -l -p $RPM | grep '/boot/vmlinuz')
-        if [ $? -ne 0 ]; then
-            echo "Error -- cannot find /boot/vmlinuz in ${RPM}"
-            return 1
-        fi
-    done
-
-    sign kernel $KERNEL $UNSIGNED_KERNEL
-    return $?
-}
-
-# rebuild_pkgs - rebuild any packages that need to be updated from the newly
-# signed binaries
-#
-function rebuild_pkgs
-{
-	local LOGFILE="$MY_WORKSPACE/export/signed-rebuild.log"
-	local PKGS_TO_REBUILD=${REBUILD_LIST}
-
-	if [ "x${PKGS_TO_REBUILD}" == "x" ]; then
-	    # No rebuilds required, return cleanly
-	    return 0
-	fi
-
-	# If we reach this point, then we have one or more packages to be rebuilt
-
-	# first, update the repo so it is aware of the "latest" binaries
-	update_repo std
-	if [ $? -ne 0 ]; then
-		echo "Could not update signed packages -- could not update repo"
-		return 1
-	fi
-
-        echo "Performing rebuild of packages: $PKGS_TO_REBUILD"
-        FORMAL_BUILD=0 build-pkgs --no-descendants --no-build-info --no-required --careful --append-log $PKGS_TO_REBUILD > $LOGFILE 2>&1
-
-	if [ $? -ne 0 ]; then
-		echo "Could not rebuild packages: $PKGS_TO_REBUILD -- see $LOGFILE for details"
-		return 1
-	fi
-
-	echo "Done"
-	return 0
-}
-
-# sign <type_of_pkg> <pkg> [pkg_containing_unsigned_bins]
-#
-# This routine uploads a package to the signing server, instructs the signing
-# signing server to do its' magic, and downloads the updated (signed) package
-# from the signing server.
-#
-# Accessing the signing server -- the signing server cannot just be logged
-# into by anyone.  A small number of users (Jason McKenna, Scott Little, Greg
-# Waines, etc) have permission to log in as themselves.  In addition, there is
-# a user "signing" who is unique to the server.  The "jenkins" user on our
-# build servers has permission to login/upload files as "signing" due to Jenkins'
-# private SSH key being added to the signing user's list of keys.  This means
-# that Jenkins can upload and run commands on the server as "signing".
-#
-# In addition to uploading files as signing, the signing user has permissions to
-# run a single command (/opt/signing/sign.sh) as a sudo root user.  The signing
-# user does not have access to modify the script or to run any other commands as
-# root.  The sign.sh script will take inputs (the files that jenkins has
-# uploaded), verify the contents, sign the images against private keys, and
-# output a new .rpm contianing the signed version of the files.  Assuming all
-# is successful, the filename of the signed output file is returned, and the
-# jenkins user can then use that filename to download the file (the "signing"
-# user does not have access to remove or modify the file once it's created).
-#
-# All operations done on the signing server are logged in muliple places, and
-# the output RPM artifacts are timestamped to ensure that they are not
-# overwritten by subsequent calls to sign.sh.
-#
-# kernel and grub package types require you to specify/upload the unsigned
-# packages as well as the normal binary
-function sign
-{
-	local TYPE=$1
-	local FILE=$2
-	local UNSIGNED=$3
-	local UNSIGNED_OPTION=""
-	local TMPFILE=`mktemp /tmp/sign.XXXXXXXX`
-
-	# Don't sign if we've already signed it
-	check_if_pkg_needs_signing ${FILE}
-	if [ $? -eq 0 ]; then
-		echo "Not signing ${FILE} as we previously signed it"
-		return 0
-	fi
-
-	echo "Signing $FILE"
-
-	# upload the original package
-	scp -q $FILE $SIGNING_USER@$SIGNING_SERVER:$UPLOAD_PATH
-	if [ $? -ne 0 ]; then
-		echo "Failed to upload file $FILE"
-		\rm -f $TMPFILE
-		return 1
-	fi
-	
-	# upload the unsigned package (if specified)
-	if [ "x$UNSIGNED" != "x" ]; then
-		echo "Uploading unsigned: $UNSIGNED"
-		scp -q $UNSIGNED $SIGNING_USER@$SIGNING_SERVER:$UPLOAD_PATH
-		if [ $? -ne 0 ]; then
-			echo "Failed to upload file $UNSIGNED"
-			\rm -f $TMPFILE
-			return 1
-		fi
-		UNSIGNED=$(basename $UNSIGNED)
-		UNSIGNED_OPTION="-u $UPLOAD_PATH/$UNSIGNED"
-	fi
-
-	# Call the magic script on the signing server.  Note that the user
-	# ($SIGNING_USER) has sudo permissions but only to invoke this one script.
-	# The signing user cannot make other sudo calls.
-	#
-	# We place output in $TMPFILE to extract the output file name later
-	#
-	ssh $SIGNING_USER@$SIGNING_SERVER sudo $SIGNING_SCRIPT -v -i $UPLOAD_PATH/$(basename $FILE) $UNSIGNED_OPTION -t $TYPE > $TMPFILE 2>&1
-	if [ $? -ne 0 ]; then
-		echo "Signing of $FILE failed"
-		\rm -f $TMPFILE
-		return 1
-	fi
-	
-	# The signing server script will output the name by which the newly signed
-	# RPM can be found.  This will be a unique filename (based on the unique
-	# upload directory generated by the "-r" option above).
-	#
-	# The reason for this is so that we can archive all output files
-	# and examine them later without them being overwriten.  File paths are
-	# typically of the form
-	#
-	# /export/signed_images/XXXXXXX_grub2-efi-64-2.02-0.44.el7.centos.tis.3.x86_64.rpm
-	#
-	# Extract the output name, and copy the RPM back into our system
-	# (Note that we overwrite our original version of the RPM)
-	#
-	# Note that some packages (like grub) may produce multiple output RPMs (i.e.
-	# multiple lines list output files.
-	OUTPUT=`grep "Output written:" $TMPFILE | sed "s/Output written: //"`
-	
-	# Check that we got something
-	if [ "x$OUTPUT" == "x" ]; then
-		echo "Could not determine output file -- check logs on signing server for errors"
-		\cp $TMPFILE $MY_WORKSPACE/export/signing.log
-		\rm -f $TMPFILE
-		return 1
-	fi
-
-	# The signing script can return multiple output files, if appropriate for
-	# the input RPM source type.  Copy each output RPM to our repo
-	# Note that after we download the file we extract the base package name
-	# from the RPM to find the name of the file that it *should* be named
-	#
-	# example:
-	#   we'd download "Zrqyeuzw_kernel-3.10.0-514.2.2.el7.20.tis.x86_64.rpm"
-	#   we'd figure out that the RPM name should be "kernel"
-	#   we look for "kernel" in the RPM filename, and rename
-	#     "Zrqyeuzw_kernel-3.10.0-514.2.2.el7.20.tis.x86_64.rpm" to
-	#     "kernel-3.10.0-514.2.2.el7.20.tis.x86_64.rpm"
-	while read OUTPUT_FILE; do
-
-		# Download the file from the signing server
-		local DOWNLOAD_FILENAME=$(basename $OUTPUT_FILE)
-		scp -q $SIGNING_USER@$SIGNING_SERVER:$OUTPUT_FILE $(dirname $FILE)
-		if [ $? -ne 0 ]; then
-			\rm -f $TMPFILE
-			echo "Copying file from signing server failed"
-			return 1
-		fi
-		echo "Successfully retrieved $OUTPUT_FILE"
-
-		# figure out what the file should be named (strip away leading chars)
-		local RPM_NAME=`rpm -qp $(dirname $FILE)/$DOWNLOAD_FILENAME --qf="%{name}"`
-		local CORRECT_OUTPUT_FILE_NAME=`echo $DOWNLOAD_FILENAME | sed "s/^.*$RPM_NAME/$RPM_NAME/"`
-
-		# rename the file
-		\mv -f $(dirname $FILE)/$DOWNLOAD_FILENAME $(dirname $FILE)/$CORRECT_OUTPUT_FILE_NAME
-
-		# replace the version of the file in results
-		#
-		# Potential hiccup in future -- this code currenty replaces any output file in EITHER
-		# std or rt results which matches the filename we just downloaded from the signing.
-		# server.  This means there could be an issue where we sign something-ver-rel.arch.rpm
-		# but we expect different versions of that RPM in std and in rt.  Currently, we do not
-		# have any RPMs which have that problem (all produced RPMs in rt have the "-rt" suffix
-		# let along any "signed" rpms) but it's something of which to be aware.
-		#
-		# Also, note that we do not expect multiple RPMs in each repo to have the same filename.
-		# We use "head -n 1" to handle that, but again it shouldn't happen.
-		# 
-		for buildtype in std rt; do
-			x=`find $MY_WORKSPACE/$buildtype/results/${MY_BUILD_ENVIRONMENT_TOP}-$buildtype -name $CORRECT_OUTPUT_FILE_NAME | head -n 1`
-			if [ ! -z "$x" ]; then
-				cp $(dirname $FILE)/$CORRECT_OUTPUT_FILE_NAME $x
-			fi
-		done
-
-		echo "Have signed file $(dirname $FILE)/$CORRECT_OUTPUT_FILE_NAME"
-	done <<< "$OUTPUT"
-
-	\rm -f $TMPFILE
-
-	# If we just signed a shim package, flag that shim needs to be rebuilt
-	if [ "${TYPE}" == "shim" ]; then
-		REBUILD_LIST="${REBUILD_LIST} shim-signed"
-	fi
-
-	echo "Done"
-	update_signed_pkg_list ${FILE}
-
-	return 0
-}
-
-# Main script
-
-if [ "x$MY_WORKSPACE" == "x" ]; then
-	echo "Environment not set up -- abort"
-	exit 1
-fi
-
-ARCH="x86_64"
-SIGNING_SCRIPT=/opt/signing/sign.sh
-UPLOAD_PATH=`ssh $SIGNING_USER@$SIGNING_SERVER sudo $SIGNING_SCRIPT -r`
-SIGNED_PKG_DB=${MY_WORKSPACE}/signed_pkg_list.txt
-REBUILD_LIST=""
-MY_BUILD_ENVIRONMENT_TOP=${MY_BUILD_ENVIRONMENT_TOP:-$MY_BUILD_ENVIRONMENT}
-
-# Check that we were able to request a unique path for uploads
-echo $UPLOAD_PATH | grep -q "^Upload:"
-if [ $? -ne 0 ]; then
-	echo "Failed to get upload path -- abort"
-	exit 1
-fi
-UPLOAD_PATH=`echo $UPLOAD_PATH | sed "s%^Upload: %%"`
-
-sign_kernels
-if [ $? -ne 0 ]; then
-	echo "Failed to sign kernels -- abort"
-	exit 1
-fi
-
-sign_shims
-if [ $? -ne 0 ]; then
-	echo "Failed to sign shims -- abort"
-	exit 1
-fi
-
-sign_grubs
-if [ $? -ne 0 ]; then
-	echo "Failed to sign grubs -- abort"
-	exit 1
-fi
-
-update_repo std
-if [ $? -ne 0 ]; then
-	echo "Failed to update std repo -- abort"
-	exit 1
-fi
-
-rebuild_pkgs
-if [ $? -ne 0 ]; then
-	echo "Failed to update builds with signed dependancies -- abort"
-	exit 1
-fi
-
-update_repo std
-if [ $? -ne 0 ]; then
-	echo "Failed to update std repo -- abort"
-	exit 1
-fi
-
-update_repo rt
-if [ $? -ne 0 ]; then
-	echo "Failed to update rt repo -- abort"
-	exit 1
-fi
-
diff --git a/build-tools/source_lookup.txt b/build-tools/source_lookup.txt
deleted file mode 100644
index fe0145ef..00000000
--- a/build-tools/source_lookup.txt
+++ /dev/null
@@ -1,85 +0,0 @@
-git://git.qemu.org/qemu.git qemu-kvm-ev 2.3.0
-https://github.com/openstack/nova.git openstack-nova 2015.1.0
-git://libvirt.org/libvirt.git libvirt 1.2.17
-http://www.drbd.org/download/drbd/8.4/archive/drbd-8.4.3.tar.gz drbd 8.4.3
-https://github.com/openstack/neutron.git openstack-neutron 2015.1.2
-https://github.com/openstack/ceilometer.git openstack-ceilometer 2015.1.2
-git://dpdk.org/dpdk cgcs-dpdk 2.2.0
-git://dpdk.org/dpdk cgcs-dpdk-rt 2.2.0
-http.debian.net/debian/pool/main/d/dpkg/dpkg_1.18.4.tar.xz dpkg 1.18.4
-https://sourceforge.net/projects/e1000/files/i40e%20stable/1.4.25/i40e-1.4.25.tar.gz/download i40e-kmod 1.4.25
-http://dpdk.org/download/mlx4/2015-05-27-DPDK-v2.0.0/libmlx4-1.0.5mlnx1.tar.gz libmlx4-dpdk 1.0.5
-https://www.kernel.org/pub/software/utils/dtc/dtc-1.4.0.tar.gz libfdt 1.4.0
-https://github.com/openstack/heat.git openstack-heat 2015.1.2
-https://github.com/openstack/keystone.git openstack-keystone 2015.1.0
-https://github.com/openstack/puppet-ceilometer.git puppet-ceilometer 5.1.0
-https://github.com/openstack/puppet-ceph.git puppet-ceph 0.1.0
-https://github.com/openstack/puppet-cinder.git puppet-cinder 5.1.0
-https://github.com/openstack/puppet-glance.git puppet-glance 5.1.0
-https://github.com/openstack/puppet-heat.git puppet-heat 5.1.0
-https://github.com/openstack/puppet-horizon.git puppet-horizon 5.1.0
-https://github.com/openstack/puppet-keystone.git puppet-keystone 5.1.0
-https://github.com/openstack/puppet-neutron.git puppet-neutron 5.1.0
-https://github.com/openstack/puppet-nova.git puppet-nova 5.1.0
-https://github.com/openstack/puppet-openstacklib.git puppet-openstacklib 5.1.0
-https://github.com/openstack/puppet-swift.git puppet-swift 5.1.0
-https://github.com/openstack/puppet-tempest.git puppet-tempest 5.1.0
-https://github.com/openstack/puppet-vswitch.git puppet-vswitch 1.1.0
-https://github.com/adrienthebo/puppet-boolean.git puppet-boolean 1.0.2
-https://github.com/rcritten/puppet-certmonger.git puppet-certmonger 1.0.3
-https://github.com/puppetlabs/puppetlabs-concat.git puppet-concat 1.2.3
-https://github.com/puppetlabs/puppetlabs-create_resources.git puppet-create_resources 0.0.1
-github.com/netmanagers/puppet-dnsmasq puppet-dnsmasq 1.1.0
-https://github.com/puppetlabs/puppetlabs-drbd.git puppet-drbd 0.1.0
-https://github.com/voxpupuli/puppet-filemapper puppet-filemapper 1.1.3
-https://github.com/puppetlabs/puppetlabs-firewall.git puppet-firewall 1.6.0
-https://github.com/puppetlabs/puppetlabs-haproxy.git puppet-haproxy 1.2.0
-https://github.com/puppetlabs/puppetlabs-inifile.git puppet-inifile 1.3.0
-https://github.com/camptocamp/puppet-kmod puppet-kmod 2.1.1
-https://github.com/torian/puppet-ldap puppet-ldap 0.2.4
-https://github.com/puppetlabs/puppetlabs-lvm.git puppet-lvm 0.5.0
-https://github.com/voxpupuli/puppet-network puppet-network 1.0.2
-https://github.com/jlyheden/puppet-nslcd puppet-nslcd 0.0.1
-https://github.com/rcritten/puppet-nssdb puppet-nssdb 1.0.1
-https://github.com/puppetlabs/puppetlabs-postgresql.git puppet-postgresql 4.3.0
-https://github.com/example42/puppi puppet-puppi 2.1.11
-https://github.com/puppetlabs/puppetlabs-rabbitmq.git puppet-rabbitmq 5.2.2
-https://github.com/puppetlabs/puppetlabs-rsync.git puppet-rsync 0.4.0
-https://github.com/puppetlabs/puppetlabs-stdlib.git puppet-stdlib 4.6.0
-https://github.com/puppetlabs/puppetlabs-sysctl.git puppet-sysctl 0.1.0
-https://github.com/puppetlabs/puppetlabs-vcsrepo.git puppet-vcsrepo 1.3.0
-https://github.com/derekhiggins/puppet-vlan puppet-vlan 0.1.0
-https://github.com/puppetlabs/puppetlabs-xinetd.git puppet-xinetd 1.5.0
-https://github.com/dmsimard/python-cephclient python-cephclient 0.1.0.5
-https://github.com/jaraco/keyring python-keyring 5.3
-http://vincentbernat.github.com/lldpd/ lldpd 0.9.0
-https://launchpad.net/tempest tempest 4
-https://toolbelt.readthedocs.org/ requests-toolbelt 0.5.1
-https://pypi.python.org/pypi/WSME python-wsme 0.6.4
-https://github.com/madkiss/openstack-resource-agents/tree/stable-grizzly openstack-ras 1.0.0
-https://github.com/openstack/python-ceilometerclient python-ceilometerclient 1.0.14
-https://github.com/openstack/python-cinderclient/archive python-cinderclient 1.1.3
-http://horizon.openstack.org/ python-django-horizon 2015.1.0
-http://github.com/openstack/python-glanceclient python-glanceclient 0.17.1
-https://github.com/openstack/python-heatclient python-heatclient 0.4.0
-https://github.com/openstack/python-keystoneclient python-keystoneclient 1.3.1
-http://launchpad.net/python-neutronclient/ python-neutronclient 2.4.0
-https://pypi.python.org/pypi/python-novaclient python-novaclient 2.23.0
-https://en.osdn.jp/projects/sfnet_ldapscripts/releases/ ldapscripts 2.0.5
-http://dpdk.org/download/mlx4/2015-05-27-DPDK-v2.0.0/libibverbs-1.1.7mlnx1.tar.gz libibverbs-dpdk 1.1.7
-http://www.openstack.org/software/openstack-storage/ openstack-cinder 2015.1.0
-http://glance.openstack.org openstack-glance 2015.1.0
-https://github.com/stackforge/packstack packstack 2014.1.0
-https://github.com/stackforge/puppet puppet 3.7.4
-http://www.drbd.org/ drbd-kernel 8.4.7
-http://ceph.com/ ceph 0.94.6
-https://sourceforge.net/p/ibmtpm20tss/tss/ci/v930/tree/ tss2 930
-https://git.centos.org/git/rpms/rt-setup rt-setup 1.59
-https://git.centos.org/git/rpms/rtctl rtctl 1.13
-https://github.com/openstack/kingbird.git distributedcloud 1.0.0
-https://github.com/openstack/python-kingbirdclient.git distributedcloud-client 1.0.0
-http://git.infradead.org/users/jjs/linux-tpmdd.git tpm-kmod 4.12
-http://git.infradead.org/users/jjs/linux-tpmdd.git tpm-kmod-rt 4.12
-http://git.infradead.org/users/jjs/linux-tpmdd.git integrity-kmod 4.12 # yes, integrity (IMA) and tpm come from the same place
-http://git.infradead.org/users/jjs/linux-tpmdd.git integrity-kmod-rt 4.12
-
diff --git a/build-tools/spec-utils b/build-tools/spec-utils
deleted file mode 100644
index 6e531158..00000000
--- a/build-tools/spec-utils
+++ /dev/null
@@ -1,713 +0,0 @@
-RPM_MACRO_FILE=/usr/lib/rpm/macros
-
-spec_query_with_macros () {
-   local SPEC_FILE=$1; shift
-   local BUILD_DIR=$1; shift
-   local TIS_PATCH_VER=$1; shift
-   local PBR_VERSION=$1; shift
-   local rc
-
-   TMPSPEC=$(mktemp /tmp/spec-utils-XXXXXX)
-   cat $SPEC_FILE | sed 's/%(rpm.*)/%(echo 0)/' > $TMPSPEC
-
-   rpmspec -P \
-      --define="_tis_build_type ${BUILD_TYPE:-std}" \
-      --define="_tis_dist .tis" \
-      --define="tis_patch_ver ${TIS_PATCH_VER:-0}" \
-      --define="pbr_version ${PBR_VERSION:-0}" \
-      --define="platform_release ${PLATFORM_RELEASE:-00.00}" \
-      --define="%_topdir $BUILD_DIR" \
-      "${@}" \
-      $TMPSPEC 2>> /dev/null
-   rc=$?
-
-   \rm -f $TMPSPEC
-   return $rc
-}
-
-spec_evaluate () {
-   local RAW_VALUE=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-
-   local LAST_SPEC_EVALUATING="$SPEC_EVALUATING"
-   local MACRO=""
-   local MACRO_VALUE=""
-   local RC=0
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   MACRO=$(expr match "$RAW_VALUE" '.*\(%{[^}]*}\)')
-   if [ $? -ne 0 ]; then
-      echo "$RAW_VALUE"
-      return 0
-   fi
-
-   if [ "x$SPEC_EVALUATING" == "x" ]; then
-       SPEC_EVALUATING=":$MACRO:"
-   else
-       echo "$SPEC_EVALUATING" | grep -q ":$MACRO:"
-       if [ $? -eq 0 ]; then
-           # Break a recursion
-           >&2 echo "ERROR: evaluation of macro '$MACRO' failed due to recursion"
-           return 1
-       fi
-       SPEC_EVALUATING="$LAST_SPEC_EVALUATING$MACRO:"
-   fi
-
-   # >&2 echo "spec_evaluate: MACRO=$MACRO"
-   local MACRO_NAME2=${MACRO#%{}
-   local MACRO_NAME3=${MACRO_NAME2%\}}
-   local PREFIX=$(expr match "$MACRO_NAME3" '\([!?]*\)')
-   local MACRO_NAME=${MACRO_NAME3#${PREFIX}}
-
-   # >&2 echo "spec_evaluate: MACRO_NAME=$MACRO_NAME"
-   MACRO_VALUE=$(spec_find_macro $MACRO_NAME $SPEC_FILE $RPMBUILD_DIR)
-   if [ $? -ne 0 ]; then
-      # >&2 echo "CALL: spec_find_global $MACRO_NAME $SPEC_FILE $RPMBUILD_DIR"
-      MACRO_VALUE=$(spec_find_global $MACRO_NAME $SPEC_FILE $RPMBUILD_DIR)
-      if [ $? -ne 0 ]; then
-         MACRO_VALUE=$(spec_find_tag ${MACRO_NAME^} $SPEC_FILE $RPMBUILD_DIR)
-         if [ $? -ne 0 ]; then
-            MACRO_VALUE=$(macro_find_macro $MACRO_NAME $SPEC_FILE $RPMBUILD_DIR)
-            if [ $? -ne 0 ]; then
-               MACRO_VALUE=$(spec_find_macro_via_rpm $MACRO_NAME $SPEC_FILE $RPMBUILD_DIR)
-               if [ $? -ne 0 ]; then
-                  case "$MACRO_NAME" in
- 
-                     _tis_build_type)    MACRO_VALUE="${BUILD_TYPE}" ;;
-                     _tis_dist)          MACRO_VALUE=".tis" ;;
-                     tis_patch_ver)      MACRO_VALUE="{TIS_PATCH_VER:-0}" ;;
-                     pbr_version)        MACRO_VALUE="{PBR_VERSION:-0}" ;;
-                     platform_release)   MACRO_VALUE="$PLATFORM_RELEASE" ;;
-                     _topdir)            MACRO_VALUE="$BUILD_DIR" ;;
-                     *) ;;
-                  esac
-
-                  if [ "x$MACRO_VALUE" == "x" ]; then
-                     if [ "$PREFIX" == '?' ]; then
-                        >&2 echo "NOTE: optional macro '$MACRO' not defined"
-                     else
-                        >&2 echo "ERROR: evaluation of macro '$MACRO' failed"
-                        SPEC_EVALUATING="$LAST_SPEC_EVALUATING"
-                        return 1
-                     fi
-                  fi
-               fi
-            fi
-         fi
-      fi
-   fi
-
-   # >&2 echo "spec_evaluate: MACRO_VALUE=$MACRO_VALUE"
-   local NEW_VALUE=${RAW_VALUE/"${MACRO}"/${MACRO_VALUE}}
-   # >&2 echo "spec_evaluate: NEW_VALUE=$NEW_VALUE"
-   spec_evaluate "$NEW_VALUE" "$SPEC_FILE" "$RPMBUILD_DIR"
-   RC=$?
-   SPEC_EVALUATING="$LAST_SPEC_EVALUATING"
-   return $RC
-}
-
-macro_find_macro () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local LINE=""
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   # >&2 echo "grep ^%$TARGET $RPM_MACRO_FILE"
-   LINE=$(grep "^%$TARGET[[:space:]]" $RPM_MACRO_FILE)
-   if [ $? -eq 1 ]; then
-       >&2 echo "macro_find_macro: '%$TARGET' not found in file '$RPM_MACRO_FILE'"
-       echo ""
-       return 1
-   fi
-
-   # >&2 echo "macro_find_macro: LINE=$LINE"
-   local UNSTRIPED_VALUE=${LINE##"%$TARGET"}
-   # >&2 echo "macro_find_macro: UNSTRIPED_VALUE=$UNSTRIPED_VALUE"
-   local RAW_VALUE=${UNSTRIPED_VALUE//[[:space:]]/}
-   # >&2 echo "macro_find_macro: RAW_VALUE=$RAW_VALUE"
-
-   spec_evaluate "$RAW_VALUE" "$SPEC_FILE" "$RPMBUILD_DIR"
-}
-
-spec_find_macro_via_rpm () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-
-   local RC=1
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   # >&2 echo "spec_find_macro_via_rpm: TARGET=$TARGET"
-
-   case "$TARGET" in
-      name|_name)       (spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{NAME}\n' | head -n 1 ; exit ${PIPESTATUS[0]} ); RC=$? ;;
-      version|_version) (spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{VERSION}\n' | head -n 1 ; exit ${PIPESTATUS[0]} ); RC=$? ;;
-      release|_release) (spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{RELEASE}\n' | head -n 1 ; exit ${PIPESTATUS[0]} ); RC=$? ;;
-      *) ;;
-   esac
-
-   if [ $RC -ne 0 ]; then
-      echo ""
-   fi
-   return $RC
-}
-
-spec_find_macro () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$2
-   local LINE=""
-   local UNSTRIPED_VALUE=""
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   # >&2 echo "grep ^%define $TARGET $SPEC_FILE"
-   LINE=$(grep "^%define $TARGET[[:space:]]" $SPEC_FILE)
-   if [ $? -eq 1 ]; then
-       LINE=$(grep "^%$TARGET[[:space:]]" $SPEC_FILE)
-       if [ $? -eq 1 ]; then
-           >&2 echo "spec_find_macro: Neither '%define $TARGET' nor '%$TARGET' found in file '$SPEC_FILE'"
-           echo ""
-           return 1
-       else
-           UNSTRIPED_VALUE=${LINE##"%$TARGET"}
-       fi
-   else
-       UNSTRIPED_VALUE=${LINE##"%define $TARGET"}
-   fi
-
-   # >&2 echo "spec_find_macro: LINE=$LINE"
-   # >&2 echo "spec_find_macro: UNSTRIPED_VALUE=$UNSTRIPED_VALUE"
-   local RAW_VALUE=$(echo ${UNSTRIPED_VALUE} | sed -e 's/^ *//g;s/ *$//g')
-   # >&2 echo "spec_find_macro: RAW_VALUE=$RAW_VALUE"
-
-   spec_evaluate "$RAW_VALUE" "$SPEC_FILE" "$RPMBUILD_DIR"
-}
-
-spec_find_tag () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local TIS_PATCH_VER=$4
-   local PBR_VERSION=$5
-   local LINE=""
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   local SPEC_FILE2=$(mktemp /tmp/tmp_spec_XXXXXX.spec)
-
-   # Note: ${VAR:-val} is bash syntax for providing a default value.
-   #       ie. if $VAR is not set, use 'val' as default value
-   spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} > $SPEC_FILE2
-   if [ $? != 0 ]; then
-      #  spec_query_with_macros can fail on grub2 if it's just a spec file without SOURCES
-      \cp $SPEC_FILE $SPEC_FILE2
-   fi
-
-   LINE=$(grep "^$TARGET:" $SPEC_FILE2 | head -n 1 ; exit ${PIPESTATUS[0]})
-   if [ $? -eq 1 ]; then
-       LINE=$(grep "^${TARGET^}:" $SPEC_FILE2 | head -n 1 ; exit ${PIPESTATUS[0]})
-       if [ $? -eq 1 ]; then
-           >&2 echo "spec_find_tag: '$TARGET:' not found in file '$SPEC_FILE'"
-           echo ""
-           \rm -f "$SPEC_FILE2"
-           return 1
-       else
-           TARGET=${TARGET^}
-       fi
-   fi
-   \rm -f "$SPEC_FILE2"
-
-   # >&2 echo "spec_find_tag: LINE=$LINE"
-   local UNSTRIPED_VALUE=${LINE##"$TARGET:"}
-   # >&2 echo "spec_find_tag: UNSTRIPED_VALUE=$UNSTRIPED_VALUE"
-   local RAW_VALUE=${UNSTRIPED_VALUE//[[:space:]]/}
-   # >&2 echo "spec_find_tag: RAW_VALUE=$RAW_VALUE"
-
-   spec_evaluate "$RAW_VALUE" "$SPEC_FILE" "$RPMBUILD_DIR"
-}
-
-spec_find_multi_tag () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$2
-   local LINE=""
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   while read LINE; do
-      # >&2 echo "spec_find_multi_tag: LINE=$LINE"
-      local UNSTRIPED_VALUE=${LINE##"$TARGET:"}
-      # >&2 echo "spec_find_multi_tag: UNSTRIPED_VALUE=$UNSTRIPED_VALUE"
-      local RAW_VALUE=${UNSTRIPED_VALUE//[[:space:]]/}
-      # >&2 echo "spec_find_multi_tag: RAW_VALUE=$RAW_VALUE"
-
-      spec_evaluate "$RAW_VALUE" "$SPEC_FILE" "$RPMBUILD_DIR"
-   done << EOF
-$(grep "^$TARGET:" $SPEC_FILE)
-EOF
-}
-
-spec_find_global () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local LINE=""
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   LINE=$(grep "^%global $TARGET" $SPEC_FILE)
-   if [ $? -eq 1 ]; then
-       >&2 echo "spec_find_global: '%global $TARGET' not found in file '$SPEC_FILE'"
-       echo ""
-       return 1
-   fi
-
-   # >&2 echo "spec_find_global: LINE=$LINE"
-   local UNSTRIPED_VALUE=${LINE##"%global $TARGET"}
-   # >&2 echo "spec_find_global: UNSTRIPED_VALUE=$UNSTRIPED_VALUE"
-   local RAW_VALUE=${UNSTRIPED_VALUE//[[:space:]]/}
-   # >&2 echo "spec_find_global: RAW_VALUE=$RAW_VALUE"
-
-   spec_evaluate "$RAW_VALUE" "$SPEC_FILE" "$RPMBUILD_DIR"
-}
-
-spec_find_patch_args () {
-   local PATCH_NO="$1"
-   local SPEC_FILE="$2"
-   local RPMBUILD_DIR="$3"
-
-   local LINE=""
-   local LINE2=""
-   local PATCH_LOWER_NO
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   PATCH_LOWER_NO=$(echo $PATCH_NO | tr '[:upper:]' '[:lower:]')
-   LINE=$(grep "^%$PATCH_LOWER_NO " $SPEC_FILE)
-   if [ $? -eq 1 ]; then
-       >&2 echo "pec_find_patch_args: $PATCH_LOWER_NO' not found in file '$SPEC_FILE'"
-       echo "-p1"
-       return 1
-   fi
-   LINE2=$(spec_evaluate "$LINE" "$SPEC_FILE" "$RPMBUILD_DIR")
-
-   echo $LINE2 | cut -d' ' -f2- | sed 's/-b/-b -z/'
-   return 0
-}
-
-spec_list_packages () {
-   local SPEC_FILE=$1
-   local RPMBUILD_DIR=$2
-
-   local d=$(dirname $SPEC_FILE)
-   local bd=$(basename $d)
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   if [ "$bd" == "SPECS" ]; then
-       local dd=$(dirname $d)
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}\n' --define="%_topdir $dd" 2>> /dev/null 
-   else
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}\n' 2>> /dev/null 
-   fi
-
-   if [ $? -ne 0 ]; then
-      #  spec_query_with_macros can fail on grub2 if it's just a spec file without SOURCES
-      local NAME=$(spec_find_tag Name "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-      if [ $? -ne 0 ]; then
-         >&2 echo "ERROR: failed to evaluate 'Name'"
-         return 1
-      fi
-      echo $NAME
-   
-      grep "^%package" $SPEC_FILE | while read PACKAGE_LINE; do
-          local PKG_NAME=""
-          local PKG_NAME_RAW=$(echo $PACKAGE_LINE | awk '{ print $2 }')
-          # >&2 echo "spec_list_packages: PKG_NAME_RAW=$PKG_NAME_RAW"
-   
-          local PKG_NAME_TEMP=""
-          if [ "$PKG_NAME_RAW" == "-n" ]; then
-              PKG_NAME_TEMP=$(echo $PACKAGE_LINE | awk '{ print $3 }')
-          else
-              PKG_NAME_TEMP="$NAME-$PKG_NAME_RAW"
-          fi
-          # >&2 echo "spec_list_packages: PKG_NAME_TEMP=$PKG_NAME_TEMP"
-   
-          PKG_NAME=$(spec_evaluate "$PKG_NAME_TEMP" "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-          if [ $? -ne 0 ]; then
-              >&2 echo "ERROR: failed to evaluate package '$PACKAGE_LINE'"
-              return 1
-          fi
-          # >&2 echo "spec_list_packages: PKG_NAME=$PKG_NAME"
-   
-          echo $PKG_NAME
-      done
-   fi
-}
-
-spec_list_versioned_packages () {
-   local SPEC_FILE=$1
-   local RPMBUILD_DIR=$2
-
-   local d=$(dirname $SPEC_FILE)
-   local bd=$(basename $d)
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   if [ "$bd" == "SPECS" ]; then
-       local dd=$(dirname $d)
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}-%{version}\n' --define="%_topdir $dd" 2>> /dev/null 
-   else
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}-%{version}\n' 2>> /dev/null 
-   fi
-
-   if [ $? -ne 0 ]; then
-      #  spec_query_with_macros can fail on grub2 if it's just a spec file without SOURCES
-      local NAME=$(spec_find_tag Name "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-      if [ $? -ne 0 ]; then
-         >&2 echo "ERROR: failed to evaluate 'Name'"
-         return 1
-      fi
-
-      local VERSION=$(spec_find_tag Version "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-      echo "$NAME-$VERSION"
-   
-      grep "^%package" $SPEC_FILE | while read PACKAGE_LINE; do
-          local PKG_NAME=""
-          local PKG_NAME_RAW=$(echo $PACKAGE_LINE | awk '{ print $2 }')
-          # >&2 echo "spec_list_packages: PKG_NAME_RAW=$PKG_NAME_RAW"
-   
-          local PKG_NAME_TEMP=""
-          if [ "$PKG_NAME_RAW" == "-n" ]; then
-              PKG_NAME_TEMP=$(echo $PACKAGE_LINE | awk '{ print $3 }')
-          else
-              PKG_NAME_TEMP="$NAME-$PKG_NAME_RAW"
-          fi
-          # >&2 echo "spec_list_packages: PKG_NAME_TEMP=$PKG_NAME_TEMP"
-   
-          PKG_NAME=$(spec_evaluate "$PKG_NAME_TEMP" "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-          if [ $? -ne 0 ]; then
-              >&2 echo "ERROR: failed to evaluate package '$PACKAGE_LINE'"
-              return 1
-          fi
-          # >&2 echo "spec_list_packages: PKG_NAME=$PKG_NAME"
-   
-          echo "$PKG_NAME-$VERSION"
-      done
-   fi
-}
-
-spec_name_ver_rel () {
-   local SPEC_FILE=$1
-   local RPMBUILD_DIR=$2
-
-   local NAME=""
-   local d=$(dirname $SPEC_FILE)
-   local bd=$(basename $d)
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   NAME=$(spec_find_tag Name $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-   VERSION=$(spec_find_tag Version $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-   RELEASE=$(spec_find_tag Release $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-   echo "$NAME-$VERSION-$RELEASE"
-}
-
-spec_list_ver_rel_packages () {
-   local SPEC_FILE=$1
-   local RPMBUILD_DIR=$2
-
-   local d=$(dirname $SPEC_FILE)
-   local bd=$(basename $d)
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   if [ "$bd" == "SPECS" ]; then
-       local dd=$(dirname $d)
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}-%{version}-%{release}\n' --define="%_topdir $dd" 2>> /dev/null 
-   else
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}-%{version}-%{release}\n' 2>> /dev/null 
-   fi
-
-   if [ $? -ne 0 ]; then
-      #  spec_query_with_macros can fail on grub2 if it's just a spec file without SOURCES
-      local NAME=$(spec_find_tag Name "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-      if [ $? -ne 0 ]; then
-         >&2 echo "ERROR: failed to evaluate 'Name'"
-         return 1
-      fi
-
-      local VERSION=$(spec_find_tag Version "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-      local RELEASE=$(spec_find_tag Release "$SPEC_FILE" $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0})
-      echo "$NAME-$VERSION-$RELEASE"
-   
-      grep "^%package" $SPEC_FILE | while read PACKAGE_LINE; do
-          local PKG_NAME=""
-          local PKG_NAME_RAW=$(echo $PACKAGE_LINE | awk '{ print $2 }')
-          # >&2 echo "spec_list_packages: PKG_NAME_RAW=$PKG_NAME_RAW"
-   
-          local PKG_NAME_TEMP=""
-          if [ "$PKG_NAME_RAW" == "-n" ]; then
-              PKG_NAME_TEMP=$(echo $PACKAGE_LINE | awk '{ print $3 }')
-          else
-              PKG_NAME_TEMP="$NAME-$PKG_NAME_RAW"
-          fi
-          # >&2 echo "spec_list_packages: PKG_NAME_TEMP=$PKG_NAME_TEMP"
-   
-          PKG_NAME=$(spec_evaluate "$PKG_NAME_TEMP" "$SPEC_FILE" $RPMBUILD_DIR)
-          if [ $? -ne 0 ]; then
-              >&2 echo "ERROR: failed to evaluate package '$PACKAGE_LINE'"
-              return 1
-          fi
-          # >&2 echo "spec_list_packages: PKG_NAME=$PKG_NAME"
-   
-          echo "$PKG_NAME-$VERSION-$RELEASE"
-      done
-   fi
-}
-
-spec_list_ver_rel_arch_packages () {
-   local SPEC_FILE=$1
-   local RPMBUILD_DIR=$2
-
-   local d=$(dirname $SPEC_FILE)
-   local bd=$(basename $d)
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   if [ "$bd" == "SPECS" ]; then
-       local dd=$(dirname $d)
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}-%{version}-%{release}.%{arch}\n' --define="%_topdir $dd" 2>> /dev/null 
-   else
-       spec_query_with_macros $SPEC_FILE $RPMBUILD_DIR ${TIS_PATCH_VER:-0} ${PBR_VERSION:-0} -q --qf '%{name}-%{version}-%{release}.%{arch}\n' 2>> /dev/null 
-   fi
-}
-
-
-spec_match_package_list () {
-   local Aname=$1[@]
-   local TARGET_LIST=("${!Aname}")
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local TARGET
-   local PKG_NAME
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   for PKG_NAME in $(spec_list_packages "$SPEC_FILE" "$RPMBUILD_DIR"); do
-       for TARGET in "${TARGET_LIST[@]}"; do
-           if [ "$PKG_NAME" == "$TARGET" ]; then
-               echo $TARGET
-               return 0
-           fi
-           if [ $BUILD_TYPE == "rt" ] && [ "$PKG_NAME" == "${TARGET}-rt" ]; then
-               echo $TARGET
-               return 0
-           fi
-       done
-   done
-
-   return 1
-}
-
-
-spec_match_package () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local PKG_NAME
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   for PKG_NAME in $(spec_list_packages "$SPEC_FILE" "$RPMBUILD_DIR"); do
-       if [ "$PKG_NAME" == "$TARGET" ]; then
-           echo "found target '$TARGET' in file '$SPEC_FILE' as a package name"
-           return 0
-       fi
-   done
-
-   return 1
-}
-
-spec_match_target_list () {
-   local Aname=$1[@]
-   local TARGET_LIST=("${!Aname}")
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local TARGET
-   local NAME
-   local SERVICE
-   local PKG_NAME
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   NAME=$(spec_find_tag Name "$SPEC_FILE" "$RPMBUILD_DIR")
-   if [ $? -eq 0 ]; then
-       for TARGET in "${TARGET_LIST[@]}"; do
-           if [ "$NAME" == "$TARGET" ]; then
-               echo $TARGET
-               return 0
-           fi
-           if [ $BUILD_TYPE == "rt" ] && [ "$NAME" == "${TARGET}-rt" ]; then
-               echo $TARGET
-               return 0
-           fi
-       done
-   fi
-
-   SERVICE=$(spec_find_global service "$SPEC_FILE" "$RPMBUILD_DIR")
-   if [ $? -eq 0 ]; then
-       for TARGET in "${TARGET_LIST[@]}"; do
-           if [ "$SERVICE" == "$TARGET" ]; then
-               echo $TARGET
-               return 0
-           fi
-           if [ $BUILD_TYPE == "rt" ] && [ "$SERVICE" == "${TARGET}-rt" ]; then
-               echo $TARGET
-               return 0
-           fi
-       done
-   fi
-
-   spec_match_package_list TARGET_LIST "$SPEC_FILE" "$RPMBUILD_DIR"
-   if [ $? -eq 0 ]; then
-       return 0
-   fi
-
-   return 1
-}
-
-
-spec_match_target () {
-   local TARGET=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-   local NAME
-   local SERVICE
-   local PKG_NAME
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   NAME=$(spec_find_tag Name "$SPEC_FILE" "$RPMBUILD_DIR")
-   if [ $? -eq 0 ]; then
-       if [ "$NAME" == "$TARGET" ]; then
-           echo "found target '$TARGET' in file '$SPEC_FILE' as a name"
-           return 0
-       fi
-   fi
-
-   SERVICE=$(spec_find_global service "$SPEC_FILE" "$RPMBUILD_DIR")
-   if [ $? -eq 0 ]; then
-       if [ "$SERVICE" == "$TARGET" ]; then
-           echo "found target '$TARGET' in file '$SPEC_FILE' as a service"
-           return 0
-       fi
-   fi
-
-   spec_match_package "$TARGET" "$SPEC_FILE" "$RPMBUILD_DIR"
-   if [ $? -eq 0 ]; then
-       return 0
-   fi
-
-   return 1
-}
-
-
-spec_build_requires () {
-   local SPEC_FILE=$1
-   local RPMBUILD_DIR=$2
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   spec_find_multi_tag BuildRequires $SPEC_FILE $RPMBUILD_DIR
-}
-
-spec_untar_path () {
-   local SOURCE_NO=$1
-   local SPEC_FILE=$2
-   local RPMBUILD_DIR=$3
-
-   >&2 echo "spec_untar_path SOURCE_NO=$SOURCE_NO SPEC_FILE=$SPEC_FILE"
-   local UNTAR_PATH="."
-   local AFTER=""
-
-   if [ "x$RPMBUILD_DIR" == "x" ];then
-      RPMBUILD_DIR=$(dirname $(dirname $SPEC_FILE))
-   fi
-
-   local SETUP=$(spec_find_macro setup $SPEC_FILE $RPMBUILD_DIR)
-   AFTER=$(echo "$SETUP " | grep -o -e "[-]a[[:space:]]$SOURCE_NO[[:space:]]")
-   if [ $? -eq 0 ]; then
-      UNTAR_PATH=$(echo "$SETUP " | grep -o -e '[-]n[[:space:]][^[:space:]]*[[:space:]]' | awk '{ print $2}'; exit ${PIPESTATUS[1]})
-      if [ $? -ne 0 ]; then
-         NAME=$( spec_find_tag Name $SPEC_FILE $RPMBUILD_DIR)
-         VERSION=$(spec_find_tag Version $SPEC_FILE $RPMBUILD_DIR)
-         UNTAR_PATH="$NAME-$VERSION"
-      fi
-   fi
-   echo "$UNTAR_PATH"
-   return 0
-}
-
-
-spec_validate_tis_release () {
-   local SPEC_FILE=$1
-
-   if rpmspec --define='_tis_dist .tis' -P $SPEC_FILE 2>/dev/null | grep '^Version:' | grep '%{pbr_version}'; then
-      return 0
-   fi
-
-   # TIS Release value must include either %{?_tis_dist}.%{tis_patch_ver} or %{tis_patch_ver}%{?_tis_dist}
-   # Because spec_query_with_macros defines tis_patch_ver, we're using rpmspec directly here
-   rpmspec --define='_tis_dist .tis' -P $SPEC_FILE 2>/dev/null | grep '^Release:' \
-      | grep -qvE '\.tis\.%{tis_patch_ver}|%{tis_patch_ver}\.tis'
-   if [ $? -eq 0 ]; then
-      >&2 echo "ERROR: $SPEC_FILE: 'Release' must use %{?_tis_dist}.%{tis_patch_ver} or %{tis_patch_ver}%{?_tis_dist}"
-      >&2 grep 'Release:' $SPEC_FILE
-      return 1
-   fi
-   return 0
-}
-
diff --git a/build-tools/srpm-utils b/build-tools/srpm-utils
deleted file mode 100644
index fce9ea5d..00000000
--- a/build-tools/srpm-utils
+++ /dev/null
@@ -1,3630 +0,0 @@
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
-source $DIR/spec-utils
-source $DIR/classify
-
-declare -A SRPM_PKG_NAME_TO_PATH
-declare -a SRPM_PKG_NAMES
-
-declare -A STD_SRPM_PKG_NAME_TO_PATH
-declare -a STD_SRPM_PKG_NAMES
-
-
-METHOD_NO_RPMBUILD=0
-METHOD_RPMBUILD_UNPATCH=1
-METHOD_RPMBUILD_SCRIPT=2
-
-
-
-srpm_spec_find_version () {
-    local SPEC_PATH="$1"
-    local PKG_VER=$(spec_find_tag Version $SPEC_PATH 2>> /dev/null)
-
-    if [ "x$PKG_VER" == "x" ]; then
-        if [ "x$SRPM_EXPORT_VER" != "x" ]; then
-            PKG_VER="$SRPM_EXPORT_VER"
-        else
-            PKG_VER="0"
-        fi
-    fi
-    echo "$PKG_VER"
-}
-
-srpm_spec_find_name () {
-    local SPEC_PATH="$1"
-    local PKG_VER=$(spec_find_tag Name $SPEC_PATH 2>> /dev/null)
-
-    if [ "x$PKG_VER" == "x" ]; then
-        if [ "x$SRPM_EXPORT_NAME" != "x" ]; then
-            PKG_VER="$SRPM_EXPORT_NAME"
-        else
-            PKG_VER="0"
-        fi
-    fi
-    echo "$PKG_VER"
-}
-
-# Find the common root directory of a tar file.
-# This form take as input command syntax to list the tar file contents.
-# Prefered from is to use tar -tvf ... plus any additional args.
-#   - don't use x in place of t, we don't want side effects
-#   - Must use 'v' to help identif directories
-tar_cmd_common_dir () {
-   local TAR_EXTRACT_CMD="$1"
-
-   local i=1
-   local prev_path=""
-   local path
-   local count=0
-
-   path=$(eval "$TAR_EXTRACT_CMD -v" | grep '^d')
-   count=$(echo "$path" | wc -w)
-   if [ $count -gt 0 ]; then
-      i=1
-      while [ $i -lt 25 ]; do
-         path=$(eval "$TAR_EXTRACT_CMD -v" | grep '^d'  | cut -d ":" -f 2- | cut -d " " -f 2- | cut -f1-$i -d/ | uniq)
-         count=$(echo "$path" | wc -l)
-         if [ $count -gt 1 ]; then
-            echo $prev_path
-            i=0
-            break
-         else
-            prev_path=$path
-         fi
-         i=$((i + 1))
-      done
-   else
-      i=1
-      while [ $i -lt 25 ]; do
-         path=$(eval "$TAR_EXTRACT_CMD -v" | cut -d ':' -f 2- | cut -d ' ' -f 2- | rev | cut -d '/' -f 1 --complement | rev | cut -f1-$i -d/ | uniq)
-         count=$(echo "$path" | wc -l)
-         if [ $count -gt 1 ]; then
-            echo $prev_path
-            i=0
-            break
-         else
-            prev_path=$path
-         fi
-         i=$((i + 1))
-      done
-   fi
-   return $i
-}
-
-
-rpm_get_srpm () {
-    local rpm_path=$1
-    rpm -q --info --nosignature -p $rpm_path | grep '^Source RPM' | sed 's#^Source RPM  : ##'
-}
-
-rpm_get_name () {
-   local srpm_path=$1
-   rpm -q --queryformat '%{NAME}\n' --nosignature -p $srpm_path
-}
-
-rpm_get_version () {
-   local srpm_path=$1
-   rpm -q --queryformat '%{VERSION}\n' --nosignature -p $srpm_path
-}
-
-rpm_get_release () {
-   local srpm_path=$1
-   rpm -q --queryformat '%{RELEASE}\n' --nosignature -p $srpm_path
-}
-
-rpm_get_arch () {
-   local srpm_path=$1
-   rpm -q --queryformat '%{ARCH}\n' --nosignature -p $srpm_path
-}
-
-rpm_get_full_name () {
-   local srpm_path=$1
-   rpm -q --queryformat '%{NAME}-%{VERSION}-%{RELEASE}\n' --nosignature -p $srpm_path
-}
-
-
-raw_fix_if_ApplyPatch () {
-   local RAW_SCRIPT=$1
-
-   local TMP_SCRIPT=$(dirname $RAW_SCRIPT)/tmp_raw_script
-  
-   grep '^ApplyPatch ' $RAW_SCRIPT >> /dev/null
-   if [ $? -eq 0 ]; then
-      mv -f $RAW_SCRIPT $TMP_SCRIPT
-      local COUNT=0
-      while read -r LINE ; do
-         case "$LINE" in
-            "ApplyPatch "*)
-               PN=$(echo "$LINE" | awk '{print $2}')
-               COUNT=$((COUNT + 1))
-               echo "echo 'Patch #$COUNT $PN'" >> $RAW_SCRIPT
-               echo "$LINE" >> $RAW_SCRIPT
-               ;;
-            *)
-               echo "$LINE" >> $RAW_SCRIPT
-               ;;
-         esac
-      done < "$TMP_SCRIPT"
-   fi
-}
-
-srpm_create_raw_extract_script () {
-   local SPEC_FILE=$1
-   local ROOT_DIR=$2
-   local RPMBUILD_DIR=$3
-   local TARGET_ARCH=$4
-   local TIS_PATCH_VER=$5
-   local RAW_SCRIPT=$6
-   local TAR_DIR=$7
-   local PBR_VERSION=${8:-0}
-
-   echo "SPEC_FILE=$SPEC_FILE  ROOT_DIR=$ROOT_DIR  RPMBUILD_DIR=$RPMBUILD_DIR  TARGET_ARCH=$TARGET_ARCH  TIS_PATCH_VER=$TIS_PATCH_VER  RAW_SCRIPT=$RAW_SCRIPT  TAR_DIR=$TAR_DIR PBR_VERSION=$PBR_VERSION"
-   local BUILD_DIR="$RPMBUILD_DIR/BUILD"
-   local ApplyPatchCount=0
-
-   if [ ! -f $SPEC_FILE ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): file SPEC_FILE='$SPEC_FILE' does not exist"
-      return 1
-   fi
-
-   if [ ! -d $ROOT_DIR ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): directory ROOT_DIR='$ROOT_DIR' does not exist"
-      return 1
-   fi
-
-   if [ ! -d $RPMBUILD_DIR ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): directory RPMBUILD_DIR='$RPMBUILD_DIR' does not exist"
-      return 1
-   fi
-
-   mkdir -p $BUILD_DIR
-   mkdir -p $ROOT_DIR/tmp
-   local STDOUT_LOG=$(mktemp /tmp/stdout_XXXXX.log)
-   local STDERR_LOG=$(mktemp /tmp/stderr_XXXXX.log)
-   local PREV_STDOUT_LOG=$(mktemp /tmp/stdout_XXXXX.log)
-   local PREV_STDERR_LOG=$(mktemp /tmp/stderr_XXXXX.log)
-   local SAME=0
-
-   # Build the srpm as though for std build, for naming consistency
-   echo "stdbuf -oL -eL rpmbuild -bp $SPEC_FILE --root $ROOT_DIR --define='%_topdir $RPMBUILD_DIR' --define='_tis_dist .tis' --define='tis_patch_ver $TIS_PATCH_VER' --define='pbr_version $PBR_VERSION' --nodeps --target $TARGET_ARCH > $STDOUT_LOG 2> $STDERR_LOG"
-   stdbuf -oL -eL rpmbuild -bp $SPEC_FILE --root $ROOT_DIR \
-      --define="%_topdir $RPMBUILD_DIR" \
-      --define='_tis_dist .tis' \
-      --define="tis_patch_ver $TIS_PATCH_VER" \
-      --define="pbr_version $PBR_VERSION" \
-      --define="_tis_build_type $BUILD_TYPE" \
-      --nodeps --target $TARGET_ARCH > $STDOUT_LOG 2> $STDERR_LOG
-   if [ $? -ne 0 ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): rpmbuild -bp failed"
-      \rm -rf $STDOUT_LOG $STDERR_LOG $PREV_STDOUT_LOG $PREV_STDERR_LOG
-      return 1
-   fi
-
-   # The kernel-rt spec file protects against re-extraction,
-   # so we can't do multiple passes for that package.
-   # Trick the loop by setting SAME=1 to bypass it
-   if [ "$(basename $SPEC_FILE)" = "kernel-rt.spec" ]; then
-      SAME=1
-   fi
-
-   let COUNT=0
-   while [ $SAME -eq 0 ]; do
-      \cp -f $STDOUT_LOG $PREV_STDOUT_LOG
-      \cp -f $STDERR_LOG $PREV_STDERR_LOG
-      stdbuf -oL -eL rpmbuild -bp $SPEC_FILE --root $ROOT_DIR \
-         --define="%_topdir $RPMBUILD_DIR" \
-         --define='_tis_dist .tis' \
-         --define="tis_patch_ver $TIS_PATCH_VER" \
-         --define="pbr_version $PBR_VERSION" \
-         --define="_tis_build_type $BUILD_TYPE" \
-         --nodeps --target $TARGET_ARCH > $STDOUT_LOG 2> $STDERR_LOG
-      if [ $? -ne 0 ]; then
-         >&2 echo "ERROR: $FUNCNAME (${LINENO}): rpmbuild -bp failed"
-         \rm -rf $STDOUT_LOG $STDERR_LOG $PREV_STDOUT_LOG $PREV_STDERR_LOG
-         return 1
-      fi
-      diff $STDERR_LOG $PREV_STDERR_LOG
-      if [ $? -eq 0 ]; then
-         SAME=1
-      fi
-      let COUNT++
-      if [ $COUNT -ge 20 ]; then 
-         break; 
-      fi
-   done
-
-   if [ "$TAR_DIR" == "" ]; then
-       grep '^+' $STDERR_LOG | sed -e 's/^[+]* //' | grep -v "^for\>" > $RAW_SCRIPT
-   else
-       # Note: grep -v "^rm .*${TAR_DIR^}"  catches TAR_DIR that has been capitalized vs srpm name, like python
-       grep '^+' $STDERR_LOG | sed -e 's/^[+]* //' | grep -v "^for\>" | grep -v "^rm .*${TAR_DIR}" | grep -v "^rm .*${TAR_DIR^}"  > $RAW_SCRIPT
-   fi
-   raw_fix_if_ApplyPatch $RAW_SCRIPT
-   \rm -rf $STDOUT_LOG $STDERR_LOG $PREV_STDOUT_LOG $PREV_STDERR_LOG
-   return 0
-}
-
-
-##
-## Return patch file for the target patch number
-##
-raw_extract_patch_file () {
-   local RAW_SCRIPT=$1
-   local TARGET_PATCH_NO=$2
-   local SPEC_FILE=$3
-
-   local PATCH_FILE
-   local PATCH_PATH
-   if [ ! -f $RAW_SCRIPT ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): file RAW_SCRIPT='$RAW_SCRIPT' does not exist"
-      return 1
-   fi
-
-   PATCH_FILE=$(cat $RAW_SCRIPT | grep "echo 'Patch #$TARGET_PATCH_NO " | awk '{print $NF}' | sed  's#^(##' | sed "s#'\$##" | sed 's#):$##')
-   if [ "x$PATCH_FILE" == "x" ]; then
-      PATCH_PATH=$(cat $RAW_SCRIPT | grep "/usr/bin/cat " | grep "/$TARGET_PATCH_NO" | awk '{print $2}')
-      if [ "x$PATCH_PATH" == "x" ]; then
-          grep "^git am " $RAW_SCRIPT >> /dev/null
-          if [ $? -eq 0 ]; then
-             # Extract list of patches from git am command line options, then find n'th patch
-             PATCH_PATH=$(grep "^git am " $RAW_SCRIPT | tr ' ' '\n' | grep '[.]patch$' | sed -n "${TARGET_PATCH_NO}p")
-          else
-             grep "^xargs git am" $RAW_SCRIPT >> /dev/null
-             if [ $? -eq 0 ]; then
-                # Extract list of patches from spec file... assume no reordering ... then find n'th patch
-                PATCH_PATH=$(grep '^Patch[0-9]*:' $SPEC_FILE | sort -n | awk -F ':' '{ print $2}' | sed 's/^[ \t]*//' | sed 's/[ \t]*$//' | sed -n "${TARGET_PATCH_NO}p")
-             else
-                return 1
-             fi
-          fi
-      fi
-      PATCH_FILE=$(basename $PATCH_PATH)
-   fi
-
-   echo $PATCH_FILE
-   return 0
-}
-
-##
-## Create script to apply one patch
-##
-raw_create_patch_apply_script () {
-   local RAW_SCRIPT=$1
-   local TARGET_PATCH_NO=$2
-   local PATCH_SCRIPT=$3
-   local OLD_BUILD_DIR=$4
-   local NEW_BUILD_DIR=$5
-   local SPEC_FILE=$6
-   local PATCH_COUNT_TARGET=$7
-
-   local SOURCE_PATH=$(echo $OLD_BUILD_DIR | sed 's#/BUILD$#/SOURCES#')
-   local PATCH_NO=0
-   local PATCH_FILE=""
-   local PATCH_PATH=""
-
-   if [ ! -f $RAW_SCRIPT ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): file RAW_SCRIPT='$RAW_SCRIPT' does not exist"
-      return 1
-   fi
-
-   local COUNT_START=0
-   grep "echo 'Patch #$TARGET_PATCH_NO " $RAW_SCRIPT >> /dev/null
-   if [ $? -ne 0 ]; then
-      grep "/usr/bin/cat " $RAW_SCRIPT | grep "/$TARGET_PATCH_NO" >> /dev/null
-      if [ $? -ne 0 ]; then
-          # Extract list of patches from git am command line options, then find n'th patch
-          PATCH_PATH=$(grep "^git am " $RAW_SCRIPT | tr ' ' '\n' | grep '.patch$' | sed -n "${TARGET_PATCH_NO}p")
-          if [ "x$PATCH_PATH" == "x" ]; then
-              grep "^xargs git am" $RAW_SCRIPT >> /dev/null
-              if [ $? -eq 0 ] && [ "$SPEC_FILE" != "" ]; then
-                  # Extract list of patches from spec file... assume no reordering ... then find n'th patch
-                  PATCH_PATH=$(grep '^Patch[0-9]*:' $SPEC_FILE | sort -n | awk -F ':' '{ print $2}' | sed 's/^[ \t]*//' | sed 's/[ \t]*$//' | sed -n "${TARGET_PATCH_NO}p")
-                  if [ "x$PATCH_PATH" == "x" ]; then
-                     >&2 echo "ERROR: $FUNCNAME (${LINENO}): TARGET_PATCH_NO=$TARGET_PATCH_NO does not exist in RAW_SCRIPT=$RAW_SCRIPT"
-                     return 1
-                  fi
-              else
-                 >&2 echo "ERROR: $FUNCNAME (${LINENO}): TARGET_PATCH_NO=$TARGET_PATCH_NO does not exist in RAW_SCRIPT=$RAW_SCRIPT"
-                 return 1
-              fi
-          fi
-      fi
-   else
-      # We know 'echo Patch #$TARGET_PATCH_NO' exists in the file, so
-      # rig it so CAT_COUNT and PATCH_COUNT never match TARGET_PATCH_NO.
-      # CAT_COUNT and PATCH_COUNT are a fall back when patches aren't explicitly numbered.
-      COUNT_START=-20000
-   fi
-
-   if [ -f $PATCH_SCRIPT ]; then
-      \rm -rf $PATCH_SCRIPT
-   fi
-
-   echo "set -e" >> $PATCH_SCRIPT
-   echo "set -x" >> $PATCH_SCRIPT
-
-   local STATE=PRE_PATCH
-   local LAST_LINE=""
-   local LINE=""
-   local TYPICAL_PATCH=""
-   local CAT_COUNT=$COUNT_START
-   local PATCH_COUNT=$COUNT_START
-   local RC=0
-
-   PATCH_NO=0
-   PATCH_FILE=""
-   PATCH_PATH=""
-   local LAST_CD=""
-   local DD=""
-
-   while read -r LINE ; do
-       LINE=$(echo $LINE | sed -r "s#$(echo $OLD_BUILD_DIR | sed 's#/#[/]+#g')#$NEW_BUILD_DIR#g")
-       # >&2  echo "Parse: STATE=$STATE, LINE=$LINE"
-       if [[ "$LINE" == "'['"* ]]; then
-          continue
-       fi
-       case $STATE in
-          PRE_PATCH)
-             case "$LINE" in
-                 "echo 'Patch #"*)
-                     PATCH_NO=$(echo $LINE | awk '{ print $3 }' | sed 's/#//')
-                     PATCH_FILE=$(echo $LINE | awk '{ print $4 }' | sed "s/[():']//g")
-                     if [ $PATCH_NO -eq $TARGET_PATCH_NO ]; then
-                         STATE="PATCH_BEGIN"
-                         echo $LINE >> $PATCH_SCRIPT
-                     fi
-                     ;;
-                 "cat "*|\
-                 "/usr/bin/cat "*)
-                     PATCH_PATH=$(echo $LINE | awk '{ print $2 }')
-                     PATCH_FILE=$(basename $PATCH_PATH)
-                     PATCH_NO=$PATCH_FILE
-                     CAT_COUNT=$((CAT_COUNT + 1))
-                     if [ "$PATCH_NO" == "$TARGET_PATCH_NO" ] || [ "$CAT_COUNT" == "$TARGET_PATCH_NO" ] ; then
-                         STATE="PATCH"
-                         PATCH_NO=$TARGET_PATCH_NO
-                         echo "echo 'Patch #$PATCH_NO ($PATCH_FILE):'" >> $PATCH_SCRIPT
-                     fi
-                     ;;
-                 "/usr/bin/patch "*|\
-                 "patch "*)
-                     TYPICAL_PATCH="$LINE"
-                     PATCH_COUNT=$((PATCH_COUNT + 1))
-                     # >&2  echo "Parse: PATCH_COUNT=$PATCH_COUNT, PATCH_COUNT_TARGET=$PATCH_COUNT_TARGET, TARGET_PATCH_NO=$TARGET_PATCH_NO"
-                     if [ "$PATCH_COUNT" == "$TARGET_PATCH_NO" ] || [ "$PATCH_COUNT" == "$PATCH_COUNT_TARGET" ] ; then
-                         STATE="REVERSE_PATCH"
-                         PATCH_NO=$TARGET_PATCH_NO
-                     fi
-                     ;;
-                 "/usr/bin/git apply "*|\
-                 "git apply "*)
-                     TYPICAL_PATCH="$LINE"
-                     PATCH_COUNT=$((PATCH_COUNT + 1))
-                     if [ "$PATCH_COUNT" == "$TARGET_PATCH_NO" ] || [ "$PATCH_COUNT" == "$PATCH_COUNT_TARGET" ]; then
-                         STATE="REVERSE_PATCH"
-                         PATCH_NO=$TARGET_PATCH_NO
-                     fi
-                     ;;
-                 "/usr/bin/git am "*|\
-                 "git am "*)
-                     PATCH_PATH=$(grep "^git am " $RAW_SCRIPT | tr ' ' '\n' | grep '.patch$' | sed -n "${TARGET_PATCH_NO}p")
-                     if [ "x$PATCH_PATH" != "x" ]; then
-                        GIT_APPLY_ARGS=""
-                        GIT_AM_EXCLUDE_PENDING=0
-                        GIT_AM_INCLUDE_PENDING=0
-                        GIT_AM_DIRECTORY_PENDING=0
-                        GIT_AM_WHITESPACE_PENDING=0
-                        for GIT_AM_ARG in $(grep "^git am " $RAW_SCRIPT | tr ' ' '\n' | grep -v '.patch$'); do
-                           case "$GIT_AM_ARG" in
-                               "--exclude="*)
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               "--exclude")
-                                   GIT_AM_EXCLUDE_PENDING=1
-                                   ;;
-                               "--include="*)
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               "--include")
-                                   GIT_AM_INCLUDE_PENDING=1
-                                   ;;
-                               "--directory="*)
-                                   DD=$(basename $(echo "$GIT_AM_ARG" | cut -d '=' -f 2))
-                                   echo "DD=$DD, LAST_CD=$LAST_CD"
-                                   if [ "$DD" != "$LAST_CD" ]; then
-                                       GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   fi
-                                   ;;
-                               "--directory")
-                                   GIT_AM_DIRECTORY_PENDING=1
-                                   ;;
-                               "--whitespace="*)
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               "--whitespace")
-                                   GIT_AM_WHITESPACE_PENDING=1
-                                   ;;
-                               "-p"*)
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               "-C"*)
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               "--ignore-space-change")
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               "--ignore-whitespace")
-                                   GIT_APPLY_ARGS="$GIT_APPLY_ARGS $GIT_AM_ARG"
-                                   ;;
-                               *)
-                                   if [ $GIT_AM_EXCLUDE_PENDING -eq 1 ]; then
-                                       GIT_AM_EXCLUDE_PENDING=0
-                                       GIT_APPLY_ARGS="$GIT_APPLY_ARGS --exclude=$GIT_AM_ARG"
-                                   fi
-                                   if [ $GIT_AM_INCLUDE_PENDING -eq 1 ]; then
-                                       GIT_AM_INCLUDE_PENDING=0
-                                       GIT_APPLY_ARGS="$GIT_APPLY_ARGS --include=$GIT_AM_ARG"
-                                   fi
-                                   if [ $GIT_AM_DIRECTORY_PENDING -eq 1 ]; then
-                                       GIT_AM_DIRECTORY_PENDING=0
-                                       DD=$(basename $(echo "$GIT_AM_ARG" | cut -d '=' -f 2))
-                                       echo "DD=$DD, LAST_CD=$LAST_CD"
-                                       if [ "$DD" != "$LAST_CD" ]; then
-                                           GIT_APPLY_ARGS="$GIT_APPLY_ARGS --directory=$GIT_AM_ARG"
-                                       fi
-                                   fi
-                                   if [ $GIT_AM_WHITESPACE_PENDING -eq 1 ]; then
-                                       GIT_AM_WHITESPACE_PENDING=0
-                                       GIT_APPLY_ARGS="$GIT_APPLY_ARGS --whitespace=$GIT_AM_ARG"
-                                   fi
-                                   ;;
-                           esac
-                        done
-                        PATCH_FILE=$(basename $PATCH_PATH)
-                        PATCH_NO=$TARGET_PATCH_NO
-                        echo "echo 'Patch #$PATCH_NO ($PATCH_FILE):'" >> $PATCH_SCRIPT
-                        # >&2 echo "echo GIT_APPLY_ARGS=$GIT_APPLY_ARGS"
-                        if [ "$GIT_APPLY_ARGS" == "" ]; then
-                           echo "cat $PATCH_PATH | patch -p1" >> $PATCH_SCRIPT
-                        else
-                           echo "git apply $GIT_APPLY_ARGS $PATCH_PATH" >> $PATCH_SCRIPT
-                        fi
-                        STATE="POST_PATCH"
-                     fi
-                     ;;
-                 "xargs git am"*)
-                     PATCH_SRC_DIR="$(dirname $(dirname $SPEC_FILE))/SOURCES"
-                     PATCH_PATH=$(grep '^Patch[0-9]*:' $SPEC_FILE | sort -n | awk -F ':' '{ print $2}' | sed 's/^[ \t]*//' | sed 's/[ \t]*$//' | sed -n "${TARGET_PATCH_NO}p" | sed "s#^#$PATCH_SRC_DIR/#")
-                     if [ "x$PATCH_PATH" != "x" ]; then
-                        PATCH_FILE=$(basename $PATCH_PATH)
-                        PATCH_NO=$TARGET_PATCH_NO
-                        echo "echo 'Patch #$PATCH_NO ($PATCH_FILE):'" >> $PATCH_SCRIPT
-                        echo "cat $PATCH_PATH | patch -p1" >> $PATCH_SCRIPT
-                        STATE="POST_PATCH"
-                     fi
-                     ;;
-                 "cd "*|\
-                 "popd"*|\
-                 "pushd "*)
-                     echo $LINE >> $PATCH_SCRIPT
-                     LAST_CD=$(basename $(echo $LINE | cut -d ' ' -f2-))
-                     ;;
-                 *)
-                     ;;
-             esac
-             ;;
-          PATCH_BEGIN)
-             case "$LINE" in
-                 "cat "*|\
-                 "/usr/bin/cat "*)
-                     STATE="PATCH"
-                     CAT_COUNT=$((CAT_COUNT + 1))
-                     PATCH_PATH=$(echo $LINE | awk '{ print $2 }')
-                     ;;
-                "/usr/bin/patch "*|\
-                "patch "*)
-                     STATE="REVERSE_PATCH"
-                     PATCH_COUNT=$((PATCH_COUNT + 1))
-                     TYPICAL_PATCH="$LINE"
-                     ;;
-                "/usr/bin/git apply "*|\
-                "git apply "*)
-                     STATE="REVERSE_PATCH"
-                     PATCH_COUNT=$((PATCH_COUNT + 1))
-                     TYPICAL_PATCH="$LINE"
-                     ;;
-                "ApplyPatch "*)
-                     STATE="APPLYPATCH"
-                     PATCH_PATH=$(echo $LINE | awk '{ print $2 }')
-                     if [ ! -f $PATCH_PATH ]; then
-                        PATCH_PATH="$SOURCE_PATH/$PATCH_PATH"
-                     fi
-                     ;;
-                 *)
-                     >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state PATCH_BEGIN: $LINE"
-                     RC=1
-                     break
-                     ;;
-             esac
-             ;;
-          APPLYPATCH)
-             case "$LINE" in
-                 "/usr/bin/patch "*|\
-                 "patch "*)
-                     STATE="POST_PATCH"
-                     PATCH_COUNT=$((PATCH_COUNT + 1))
-                     echo "/usr/bin/cat $PATCH_PATH | $LINE" >> $PATCH_SCRIPT
-                     ;;
-                 "/usr/bin/git apply "*|\
-                 "git apply "*)
-                     STATE="POST_PATCH"
-                     echo "/usr/bin/cat $PATCH_PATH | $LINE" >> $PATCH_SCRIPT
-                     ;;
-                 *)
-                     ;;
-             esac
-             ;;
-          PATCH)
-             case "$LINE" in
-                 "/usr/bin/patch "*|\
-                 "patch "*)
-                     STATE="POST_PATCH"
-                     TYPICAL_PATCH="$LINE"
-                     PATCH_COUNT=$((PATCH_COUNT + 1))
-                     echo "$LAST_LINE | $LINE" >> $PATCH_SCRIPT
-                     ;;
-                 "/usr/bin/git apply "*|\
-                 "git apply "*)
-                     STATE="POST_PATCH"
-                     TYPICAL_PATCH="$LINE"
-                     echo "$LAST_LINE | $LINE" >> $PATCH_SCRIPT
-                     ;;
-                 "echo 'Patch #"*)
-                     STATE="POST_PATCH"
-                     if [ "x$TYPICAL_PATCH" != "x" ];then
-                        echo "$LAST_LINE | $TYPICAL_PATCH" >> $PATCH_SCRIPT
-                     else
-                        >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state PATCH: $LINE"
-                        RC=1
-                        break
-                     fi
-                     ;;
-                 *)
-                     >&2 echo "WARNING: * TYPICAL_PATCH=$TYPICAL_PATCH"
-                     >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state PATCH: $LINE"
-                     RC=1
-                     break
-                     ;;
-             esac
-             ;;
-          REVERSE_PATCH)
-             case "$LINE" in
-                 "cat "*|\
-                 "/usr/bin/cat "*)
-                     STATE="POST_PATCH"
-                     CAT_COUNT=$((CAT_COUNT + 1))
-                     PATCH_PATH=$(echo $LINE | awk '{ print $2 }')
-                     PATCH_FILE=$(basename $PATCH_PATH)
-                     echo "echo 'Patch #$PATCH_NO ($PATCH_FILE):'" >> $PATCH_SCRIPT
-                     echo "$LINE | $LAST_LINE" >> $PATCH_SCRIPT
-                     ;;
-                 *)
-                     # Not sure why, but the 'cat' line gets dropped on rare and hard to reproduce occasions.
-                     # Recreate it here if we can.
-                     PATCH_PATH="$SOURCE_PATH/PATCH_FILE"
-                     if [ -f "$PATCH_PATH" ]; then
-                        >&2 echo "ERROR: $FUNCNAME (${LINENO}): Assuming PATCH_PATH=$PATCH_PATH"
-                        STATE="POST_PATCH"
-                        echo "/usr/bin/cat $PATCH_PATH | $LAST_LINE" >> $PATCH_SCRIPT
-                     else
-                        >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state REVERSE_PATCH: $LINE"
-                        RC=1
-                        break
-                     fi
-                     ;;
-             esac
-             ;;
-          POST_PATCH)
-             case "$LINE" in
-                 "cd "*|\
-                 "popd"*|\
-                 "pushd "*)
-                     echo $LINE >> $PATCH_SCRIPT
-                     ;;
-                 *)
-                     ;;
-             esac
-             ;;
-   
-      esac
-      LAST_LINE="$LINE"
-   done < "$RAW_SCRIPT"
-
-   return $RC
-}
-
-##
-## script to extract tarballs 
-##
-raw_create_tarballs_extract_script () {
-   local RAW_SCRIPT=$1
-   local EXTRACT_SCRIPT=$2
-   local OLD_BUILD_DIR=$3
-   local NEW_BUILD_DIR=$4
-
-   if [ ! -f $RAW_SCRIPT ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): file RAW_SCRIPT='$RAW_SCRIPT' does not exist"
-      return 1
-   fi
-
-   if [ -f $EXTRACT_SCRIPT ]; then
-      \rm -rf $EXTRACT_SCRIPT
-   fi
-
-   local STATE="PRE_PATCH"
-   local LAST_LINE=""
-   local RC=0
-   local FIRST_TAR=0
-   local EXTRACT_DIR=""
-   local EXTRACT_TAR_DIR=""
-   local EXTRACT_TAR_DIR_NOW=""
-   local MV_DEST=""
-   local CURR_DIR=""
-   local PREV_DIR=""
-   local DEST
-   local TAR_ARGS
-   local POST_PATCH_FIRST_PASS=0
-   local KVERSION=""
-
-   # get version for kernel-rt
-   if [[ $OLD_BUILD_DIR =~ kernel-rt ]]; then
-      KVERSION=$PKG_VER
-   fi
-
-   echo "set -e" >> $EXTRACT_SCRIPT
-   echo "set -x" >> $EXTRACT_SCRIPT
-
-   while read -r LINE ; do
-      LINE=$(echo $LINE | sed -r "s#$(echo $OLD_BUILD_DIR | sed 's#/#[/]+#g')#$NEW_BUILD_DIR#g")
-      # >&2 echo "Parse: STATE=$STATE, LINE=$LINE"
-      if [[ "$LINE" == "'['"* ]]; then
-         # kernel-rt hack
-         if [[ "$LINE" == "'[' -L vanilla-3.10.0/configs ']'" ]]; then
-            echo "if [ -L vanilla-3.10.0/configs ]; then rm -f vanilla-3.10.0/configs; fi" >> $EXTRACT_SCRIPT
-         fi
-         # kernel hack
-         if [[ "$LINE" == "'[' -L configs ']'" ]]; then
-            echo "if [ -L configs ]; then rm -f configs; fi" >> $EXTRACT_SCRIPT
-         fi
-         continue
-      fi
-      case $STATE in
-         PRE_PATCH)
-            case "$LINE" in
-                "ApplyOptionalPatch"*|\
-                "ApplyPatch"*|\
-                "echo 'Patch #"*)
-                    STATE="POST_PATCH"
-                    ;;
-                "gzip -dc "*|\
-                "xz -dc "*|\
-                "bzip2 -dc "*|\
-                "/usr/bin/gzip -dc "*|\
-                "/usr/bin/xz -dc "*|\
-                "/usr/bin/bzip2 -dc "*)
-                    STATE="TAR"
-                    ;;
-                "tar -xf -"|\
-                "tar -xvf -"|\
-                "tar -xvvf -"|\
-                "tar -xo -f -"|\
-                "/usr/bin/tar -xf -"|\
-                "/usr/bin/tar -xvf -"|\
-                "/usr/bin/tar -xvvf -"|\
-                "/usr/bin/tar -xo -f -") 
-                    LINE="$LINE --exclude .git"
-                    STATE="REVERSE_TAR"
-                    ;;
-                "tar -xf "*|\
-                "tar -xvf "*|\
-                "tar -xvvf "*|\
-                "tar -xo -f "*|\
-                "/usr/bin/tar -xf "*|\
-                "/usr/bin/tar -xvf "*|\
-                "/usr/bin/tar -xvvf "*|\
-                "/usr/bin/tar -xo -f "*)
-                    echo "$LINE --exclude .git" >> $EXTRACT_SCRIPT
-                    if [ $FIRST_TAR -eq 0 ]; then
-                       TAR_ARGS=$(echo $LINE | sed -e 's#^/usr/bin/tar ##' -e 's#^tar ##' -e 's#^-xf ##' -e 's#^-xvf ##' -e 's#^-xvvf ##' -e 's#^-xo -f ##')
-                       EXTRACT_DIR=$(dirname $EXTRACT_SCRIPT)/extract_dir
-                       EXTRACT_TAR_DIR=$(dirname $EXTRACT_SCRIPT)/tar_dir
-                       EXTRACT_TAR_DIR_NOW=$(tar_cmd_common_dir "tar -tvf $TAR_ARGS")
-                       echo "readlink -f \$(pwd) > $EXTRACT_DIR" >> $EXTRACT_SCRIPT
-                    fi
-                    FIRST_TAR=1
-                    ;;
-
-                "git am "*)
-                    STATE="POST_PATCH"
-                    ;;
-                "xargs git am"*)
-                    STATE="POST_PATCH"
-                    ;;
-                "/usr/bin/patch "*|\
-                "patch "*)
-                    STATE="POST_PATCH"
-                    ;;
-                "/usr/bin/git apply "*|\
-                "git apply "*)
-                    STATE="POST_PATCH"
-                    ;;
-                "mv $EXTRACT_TAR_DIR_NOW "*)
-                    if [ "x$EXTRACT_TAR_DIR_NOW" == "x" ]; then
-                       echo "$LINE" >> $EXTRACT_SCRIPT
-                    else
-                       MV_DEST=$(echo "$LINE" | awk '{ print $NF}' )
-                       MV_SRC=$(echo "$LINE" | awk '{ print $(NF-1)}' )
-                       echo "if [ ! -L $MV_DEST ]; then if [ -d $MV_DEST ]; then if [ ! -L $MV_DEST/$EXTRACT_TAR_DIR_NOW ]; then ln -s ../$EXTRACT_TAR_DIR_NOW $MV_DEST/$EXTRACT_TAR_DIR_NOW; fi; else ln -s $EXTRACT_TAR_DIR_NOW $MV_DEST; fi; fi" >> $EXTRACT_SCRIPT
-                    fi
-                    ;;
-                "cd "*)
-                    DEST=$(echo "$LINE" | awk '{ print $NF}' )
-                    case "$DEST" in
-                        "/"*)
-                            CURR_DIR="$DEST"
-                            ;;
-                        *)
-                            CURR_DIR="$CURR_DIR/$DEST"
-                            ;;
-                    esac
-                  
-                    echo "$LINE" >> $EXTRACT_SCRIPT
-                    ;;
-                "pushd "*)
-                    DEST=$(echo "$LINE" | awk '{ print $NF}' )
-                    PREV_DIR="$CURR_DIR"
-                    case "$DEST" in
-                        "/"*)
-                            CURR_DIR="$DEST"
-                            ;;
-                        *)
-                            CURR_DIR="$CURR_DIR/$DEST"
-                            ;;
-                    esac
-                    echo "$LINE" >> $EXTRACT_SCRIPT
-                    ;;
-                "popd"*)
-                    CURR_DIR="$PREV_DIR"
-                    echo "$LINE" >> $EXTRACT_SCRIPT
-                    ;;
-                "cp "*)
-                    DEST=$(echo "$LINE" | awk '{ print $NF}' )
-                    CPY_SRC=$(echo "$LINE" | awk '{ print $(NF-1)}' )
-                    if [ "$DEST" == "linux-3.10.0.x86_64" ] && [ "$CPY_SRC" == "vanilla-3.10.0" ]; then
-                       # special case for kernel-rt
-                       echo "if [ ! -L "$DEST" ]; then" >> $EXTRACT_SCRIPT
-                       echo "   ln -s $CPY_SRC $DEST" >> $EXTRACT_SCRIPT
-                       echo "fi" >> $EXTRACT_SCRIPT
-                    else
-                       echo "$LINE" >> $EXTRACT_SCRIPT
-                    fi
-                    ;;
-                "/usr/bin/mkdir "*|\
-                "mkdir "*)
-                    echo "$LINE -p" >> $EXTRACT_SCRIPT
-                    ;;
-                "exit "*)
-                    ;;
-                "grep "*)
-                    ;;
-                "xargs "*)
-                    ;;
-                "wc "*)
-                    ;;
-                "git init "*|\
-                "git config "*|\
-                "git add "*|\
-                "git commit "*)
-                    ;;
-                "rm -rf "*)
-                    ;;
-                "VERSION=$KVERSION"*)
-                    # for kernel-rt
-                    echo "export $LINE" >> $EXTRACT_SCRIPT
-                    ;;
-                *)
-                    echo "$LINE" >> $EXTRACT_SCRIPT
-                    ;;
-            esac
-            ;;
-         REVERSE_TAR)
-            case "$LINE" in
-                "gzip -dc "*|\
-                "xz -dc "*|\
-                "bzip2 -dc "*|\
-                "/usr/bin/gzip -dc "*|\
-                "/usr/bin/xz -dc "*|\
-                "/usr/bin/bzip2 -dc "*)
-                    STATE="PRE_PATCH"
-                    echo "$LINE | $LAST_LINE" >> $EXTRACT_SCRIPT
-                    if [ $FIRST_TAR -eq 0 ]; then
-                       EXTRACT_DIR=$(dirname $EXTRACT_SCRIPT)/extract_dir
-                       EXTRACT_TAR_DIR=$(dirname $EXTRACT_SCRIPT)/tar_dir
-                       EXTRACT_TAR_DIR_NOW=$(tar_cmd_common_dir "$LINE | tar -tvf -")
-                       echo "readlink -f \$(pwd) > $EXTRACT_DIR" >> $EXTRACT_SCRIPT
-                    fi
-                    FIRST_TAR=1
-                    ;;
-                *)
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state REVERSE_TAR: $LINE"
-                    RC=1
-                    break
-                    ;;
-            esac
-            ;;
-         TAR)
-            case "$LINE" in
-                "tar -xf -"|\
-                "tar -xvf -"|\
-                "tar -xvvf -"|\
-                "tar -xo -f -"|\
-                "/usr/bin/tar -xf -"|\
-                "/usr/bin/tar -xvf -"|\
-                "/usr/bin/tar -xvvf -"|\
-                "/usr/bin/tar -xo -f -")
-                    STATE="PRE_PATCH"
-                    echo "$LAST_LINE | $LINE --exclude .git" >> $EXTRACT_SCRIPT
-                    if [ $FIRST_TAR -eq 0 ]; then
-                       EXTRACT_DIR=$(dirname $EXTRACT_SCRIPT)/extract_dir
-                       EXTRACT_TAR_DIR=$(dirname $EXTRACT_SCRIPT)/tar_dir
-                       EXTRACT_TAR_DIR_NOW=$(tar_cmd_common_dir "$LAST_LINE | tar -tvf -")
-                       echo "readlink -f \$(pwd) > $EXTRACT_DIR" >> $EXTRACT_SCRIPT
-                    fi
-                    FIRST_TAR=1
-                    ;;
-                "exit "*)
-                    ;;
-                *)
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state TAR: $LINE"
-                    RC=1
-                    break
-                    ;;
-            esac
-            ;;
-         POST_PATCH)
-            if [ $POST_PATCH_FIRST_PASS -eq 0 ]; then
-               POST_PATCH_FIRST_PASS=1
-               PATCH_DIR=$(dirname $EXTRACT_SCRIPT)/patch_dir
-               echo "readlink -f \$(pwd) > $PATCH_DIR" >> $EXTRACT_SCRIPT
-               readlink -f $(pwd)
-            fi
-            case "$LINE" in
-                "cd "*|\
-                "popd"*|\
-                "pushd "*)
-                    echo $LINE >> $EXTRACT_SCRIPT
-                    ;;
-                "iconv"*)
-                    local ICONV_LAST_ARG=$(echo $LINE | awk '{ print $NF }')
-                    local ICONV_SECOND_LAST_ARG=$(echo $LINE | awk '{ print $(NF-1) }')
-                    if [ "$ICONV_SECOND_LAST_ARG" == "utf-8" ]; then
-                       # shadow-utils hack
-                       echo "$LINE > $ICONV_LAST_ARG.utf8" >> $EXTRACT_SCRIPT
-                    fi
-                    ;;
-                "cp "*)
-                    DEST=$(echo "$LINE" | awk '{ print $NF}' )
-                    CPY_SRC=$(echo "$LINE" | awk '{ print $(NF-1)}' )
-                    if [ "$DEST" == "linux-3.10.0.x86_64" ] && [ "$CPY_SRC" == "vanilla-3.10.0" ]; then
-                       # special case for kernel-rt
-                       echo "if [ ! -L "$DEST" ]; then" >> $EXTRACT_SCRIPT
-                       echo "   ln -s $CPY_SRC $DEST" >> $EXTRACT_SCRIPT
-                       echo "fi" >> $EXTRACT_SCRIPT
-                    else
-                       echo "$LINE" >> $EXTRACT_SCRIPT
-                    fi
-                    ;;
-                "/usr/bin/mkdir "*|\
-                "mkdir "*)
-                    echo "$LINE -p" >> $EXTRACT_SCRIPT
-                    ;;
-                "exit "*)
-                    ;;
-                *)
-                    ;;
-            esac
-            ;;
-      esac
-      LAST_LINE="$LINE"
-   done < "$RAW_SCRIPT"
-
-   if [ $POST_PATCH_FIRST_PASS -eq 0 ]; then
-      PATCH_DIR=$(dirname $EXTRACT_SCRIPT)/patch_dir
-      echo "readlink -f \$(pwd) > $PATCH_DIR" >> $EXTRACT_SCRIPT
-      readlink -f $(pwd)
-   fi
-
-   return $RC
-}
-
-##
-## script to extract tarballs after metapatchs
-## ok, not really extracting a tarball, just set up symlink if required 
-##
-raw_create_tarballs_extract_script_post_metapatch () {
-   local RAW_SCRIPT=$1
-   local EXTRACT_SCRIPT=$2
-   local OLD_BUILD_DIR=$3
-   local NEW_BUILD_DIR=$4
-
-   if [ ! -f $RAW_SCRIPT ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): file RAW_SCRIPT='$RAW_SCRIPT' does not exist"
-      return 1
-   fi
-
-   if [ -f $EXTRACT_SCRIPT ]; then
-      \rm -rf $EXTRACT_SCRIPT
-   fi
-
-   local STATE="PRE_PATCH"
-   local LAST_LINE=""
-   local RC=0
-   local FIRST_TAR=0
-   local EXTRACT_DIR=""
-   local EXTRACT_TAR_DIR=""
-   local EXTRACT_TAR_DIR_NOW=""
-   local MV_DEST=""
-   local TAR_ARGS
-
-   echo "set -e" >> $EXTRACT_SCRIPT
-   echo "set -x" >> $EXTRACT_SCRIPT
-
-   while read -r LINE ; do
-      LINE=$(echo $LINE | sed -r "s#$(echo $OLD_BUILD_DIR | sed 's#/#[/]+#g')#$NEW_BUILD_DIR#g")
-      # >&2 echo "Parse: STATE=$STATE, LINE=$LINE"
-      if [[ "$LINE" == "'['"* ]]; then
-         # kernel-rt hack
-         if [[ "$LINE" == "'[' -L vanilla-3.10.0/configs ']'" ]]; then
-            echo "if [ -L vanilla-3.10.0/configs ]; then rm -f vanilla-3.10.0/configs; fi" >> $EXTRACT_SCRIPT
-         fi
-         # kernel hack
-         if [[ "$LINE" == "'[' -L configs ']'" ]]; then
-            echo "if [ -L configs ]; then rm -f configs; fi" >> $EXTRACT_SCRIPT
-         fi
-         continue
-      fi
-      case $STATE in
-         PRE_PATCH)
-            case "$LINE" in
-                "ApplyOptionalPatch"*|\
-                "ApplyPatch"*|\
-                "echo 'Patch #"*)
-                    STATE="POST_PATCH"
-                    ;;
-                "gzip -dc "*|\
-                "xz -dc "*|\
-                "bzip2 -dc "*|\
-                "/usr/bin/gzip -dc "*|\
-                "/usr/bin/xz -dc "*|\
-                "/usr/bin/bzip2 -dc "*)
-                    STATE="TAR"
-                    ;;
-                "tar -xf -"|\
-                "tar -xvf -"|\
-                "tar -xvvf -"|\
-                "tar -xo -f -"|\
-                "/usr/bin/tar -xf -"|\
-                "/usr/bin/tar -xvf -"|\
-                "/usr/bin/tar -xvvf -"|\
-                "/usr/bin/tar -xo -f -") 
-                    STATE="REVERSE_TAR"
-                    ;;
-                "tar -xf "*|\
-                "tar -xvf "*|\
-                "tar -xvvf "*|\
-                "tar -xo -f "*|\
-                "/usr/bin/tar -xf "*|\
-                "/usr/bin/tar -xvf "*|\
-                "/usr/bin/tar -xvvf "*|\
-                "/usr/bin/tar -xo -f "*)
-                    LINE="$LINE --exclude .git"
-                    if [ $FIRST_TAR -eq 0 ]; then
-                       TAR_ARGS=$(echo $LINE | sed -e 's#^/usr/bin/tar ##' -e 's#^-xf ##' -e 's#^-xvf ##' -e 's#^-xvvf ##' -e 's#^-xo -f ##')
-                       EXTRACT_DIR=$(dirname $EXTRACT_SCRIPT)/extract_dir
-                       EXTRACT_TAR_DIR=$(dirname $EXTRACT_SCRIPT)/tar_dir
-                       EXTRACT_TAR_DIR_NOW=$(tar_cmd_common_dir "tar -tvf $TAR_ARGS")
-                    fi
-                    FIRST_TAR=1
-                    ;;
-                "git am "*)
-                    STATE="POST_PATCH"
-                    ;;
-                "xargs git am"*)
-                    STATE="POST_PATCH"
-                    ;;
-                "/usr/bin/patch "*|\
-                "patch "*)
-                    STATE="POST_PATCH"
-                    ;;
-                "/usr/bin/git apply "*|\
-                "git apply "*)
-                    STATE="POST_PATCH"
-                    ;;
-                "mv $EXTRACT_TAR_DIR_NOW "*)
-                    if [ "x$EXTRACT_TAR_DIR_NOW" == "x" ]; then
-                       echo "" >> $EXTRACT_SCRIPT
-                    else
-                       MV_DEST=$(echo "$LINE" | awk '{ print $NF}' )
-                       MV_SRC=$(echo "$LINE" | awk '{ print $(NF-1)}' )
-                       echo "if [ ! -L $MV_DEST ]; then if [ -d $MV_DEST ]; then if [ ! -L $MV_DEST/$EXTRACT_TAR_DIR_NOW ]; then ln -s ../$EXTRACT_TAR_DIR_NOW $MV_DEST/$EXTRACT_TAR_DIR_NOW; fi; else ln -s $EXTRACT_TAR_DIR_NOW $MV_DEST; fi; fi" >> $EXTRACT_SCRIPT
-                    fi
-                    ;;
-                "cd "*|\
-                "popd"*|\
-                "pushd "*)
-                    echo "$LINE" >> $EXTRACT_SCRIPT
-                    ;;
-                "/usr/bin/mkdir "*|\
-                "mkdir "*)
-                    echo "$LINE -p" >> $EXTRACT_SCRIPT
-                    ;;
-                "grep "*)
-                    ;;
-                *)
-                    ;;
-            esac
-            ;;
-         REVERSE_TAR)
-            case "$LINE" in
-                "gzip -dc "*|\
-                "xz -dc "*|\
-                "bzip2 -dc "*|\
-                "/usr/bin/gzip -dc "*|\
-                "/usr/bin/xz -dc "*|\
-                "/usr/bin/bzip2 -dc "*)
-                    STATE="PRE_PATCH"
-                    if [ $FIRST_TAR -eq 0 ]; then
-                       EXTRACT_DIR=$(dirname $EXTRACT_SCRIPT)/extract_dir
-                       EXTRACT_TAR_DIR=$(dirname $EXTRACT_SCRIPT)/tar_dir
-                       EXTRACT_TAR_DIR_NOW=$(tar_cmd_common_dir "$LINE | tar -tvf -")
-                    fi
-                    FIRST_TAR=1
-                    ;;
-                *)
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state REVERSE_TAR: $LINE"
-                    RC=1
-                    break
-                    ;;
-            esac
-            ;;
-         TAR)
-            case "$LINE" in
-                "tar -xf -"|\
-                "tar -xvf -"|\
-                "tar -xvvf -"|\
-                "tar -xo -f -"|\
-                "/usr/bin/tar -xf -"|\
-                "/usr/bin/tar -xvf -"|\
-                "/usr/bin/tar -xvvf -"|\
-                "/usr/bin/tar -xo -f -")
-                    LINE="$LINE --exclude .git"
-                    STATE="PRE_PATCH"
-                    if [ $FIRST_TAR -eq 0 ]; then
-                       EXTRACT_DIR=$(dirname $EXTRACT_SCRIPT)/extract_dir
-                       EXTRACT_TAR_DIR=$(dirname $EXTRACT_SCRIPT)/tar_dir
-                       EXTRACT_TAR_DIR_NOW=$(tar_cmd_common_dir "$LAST_LINE | tar -tvf -")
-                    fi
-                    FIRST_TAR=1
-                    ;;
-                *)
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unexpected Line in state TAR: $LINE"
-                    RC=1
-                    break
-                    ;;
-            esac
-            ;;
-         POST_PATCH)
-            case "$LINE" in
-                "cd "*|\
-                "popd"*|\
-                "pushd "*)
-                    echo $LINE >> $EXTRACT_SCRIPT
-                    ;;
-                "/usr/bin/mkdir "*|\
-                "mkdir "*)
-                    echo "$LINE -p" >> $EXTRACT_SCRIPT
-                    ;;
-                *)
-                    ;;
-            esac
-            ;;
-      esac
-      LAST_LINE="$LINE"
-   done < "$RAW_SCRIPT"
-
-   return $RC
-}
-
-
-##
-## script to list patch numbers
-##
-raw_patch_order () {
-   local RAW_SCRIPT=$1
-   local SPEC_FILE=$2
-   local LINE
-   local LINE2
-   local PATCH_NO=0
-
-   if [ ! -f $RAW_SCRIPT ]; then
-      >&2 echo "ERROR: $FUNCNAME (${LINENO}): file RAW_SCRIPT='$RAW_SCRIPT' does not exist"
-      return 1
-   fi
-
-
-   while read -r LINE ; do
-      if [[ "$LINE" == "'['"* ]]; then
-         continue
-      fi
-      case "$LINE" in
-          "echo 'Patch #"*)
-              PATCH_NO=$(echo $LINE | awk '{ print $3 }' | sed 's/#//')
-              echo $PATCH_NO
-              ;;
-          "git am "*)
-              for LINE2 in $(echo $LINE | tr ' ' '\n' | grep '.patch$'); do
-                 PATCH_NO=$((PATCH_NO + 1))
-                 echo $PATCH_NO
-              done
-              ;;
-          "xargs git am"*)
-              grep '^Patch[0-9]*:' $SPEC_FILE |\
-              while read -r LINE2; do
-                 PATCH_NO=$((PATCH_NO + 1))
-                 echo $PATCH_NO
-              done
-              ;;
-          *)
-              ;;
-      esac
-   done < "$RAW_SCRIPT"
-
-   if [ $PATCH_NO -eq 0 ]; then
-      while read -r LINE ; do
-         if [[ "$LINE" == "'['"* ]]; then
-            continue
-         fi
-         case "$LINE" in
-             "cat "*|\
-             "/usr/bin/cat "*)
-                 PATCH_PATH=$(echo $LINE | awk '{ print $2 }')
-                 PATCH_FILE=$(basename $PATCH_PATH)
-                 PATCH_NO=$PATCH_FILE
-                 echo $PATCH_NO
-                 ;;
-             *)
-                 ;;
-         esac
-      done < "$RAW_SCRIPT"
-   fi
-
-   return 0
-}
-
-srpm_build_dictionary () {
-   local srpm_dir=$1
-   local srpm_path
-   local name
-
-   for srpm_path in $(find $srpm_dir -name '*.src.rpm' | sort -V); do
-      name=$(rpm_get_name $srpm_path)
-      SRPM_PKG_NAME_TO_PATH[$name]="$srpm_path"
-      SRPM_PKG_NAMES+=("$name")
-   done
-}
-
-srpm_build_std_dictionary () {
-   local srpm_dir=$1
-   local srpm_path
-   local name
-
-   for srpm_path in $(find $srpm_dir -name '*.src.rpm' | sort -V); do
-      name=$(rpm_get_name $srpm_path)
-      STD_SRPM_PKG_NAME_TO_PATH[$name]="$srpm_path"
-      STD_SRPM_PKG_NAMES+=("$name")
-   done
-}
-
-srpm_assemble () {
-   local FULL_BUILD_DIR=$1
-   local TIS_PATCH_VER=$2
-   local PBR_VERSION=$3
-
-   local SPEC_PATH
-   local SPEC
-   local SRPM_PATH
-   local SRPM
-   local NAME
-   local VERSION
-   local RELEASE
-   local BUILD_NEEDED
-
-   for SPEC in $(cd $FULL_BUILD_DIR/SPECS/; ls -1 *.spec); do
-      SPEC_PATH="$FULL_BUILD_DIR/SPECS/$SPEC"
-      NAME=$(srpm_spec_find_name "$SPEC_PATH" 2>> /dev/null)
-      if [ $? -ne 0 ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): 'Name' not found in '$SPEC_PATH'"
-      fi
-
-      VERSION=$(srpm_spec_find_version "$SPEC_PATH" 2>> /dev/null)
-      if [ $? -ne 0 ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): 'Version' not found in '$SPEC_PATH'"
-          if [ "x$SRPM_EXPORT_NAME" != "x" ]; then
-              VERSION="$SRPM_EXPORT_NAME"
-          else
-              VERSION="0"
-          fi
-      fi
-
-      RELEASE=$(spec_find_tag Release "$SPEC_PATH" "$(dirname $(dirname $SPEC_PATH))" "$TIS_PATCH_VER" "$PBR_VERSION" 2>> /dev/null)
-      if [ $? -ne 0 ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): 'Release' not found in '$SPEC_PATH'"
-          RELEASE="0"
-      fi
-
-      SRPM="$NAME-$VERSION-$RELEASE.src.rpm"
-      SRPM_PATH="$FULL_BUILD_DIR/SRPMS/$SRPM"
-
-      spec_validate_tis_release $SPEC_PATH
-      if [ $? -ne 0 ]; then
-          echo "TIS Validation of $SPEC_PATH failed"
-          exit 1
-      fi
-
-      BUILD_NEEDED=0
-      if [ -f $SRPM_PATH ]; then
-          n=$(find $FULL_BUILD_DIR -cnewer $SRPM_PATH | wc -l)
-          if [ $n -gt 0 ]; then
-              BUILD_NEEDED=1
-          fi
-      else
-          BUILD_NEEDED=1
-      fi
-
-      if [ $BUILD_NEEDED -gt 0 ]; then
-          echo "SPEC file: $SPEC_PATH"
-          echo "SRPM build directory: $FULL_BUILD_DIR"
-          echo "TIS_PATCH_VER: $TIS_PATCH_VER"
-          echo "PBR_VERSION: $PBR_VERSION"
-
-          sed -i -e "1 i%define _tis_build_type $BUILD_TYPE" $SPEC_PATH
-          sed -i -e "1 i%define tis_patch_ver $TIS_PATCH_VER" $SPEC_PATH
-          sed -i -e "1 i%define pbr_version $PBR_VERSION" $SPEC_PATH
-
-          # Build the srpm as though for std build, for naming consistency
-          if [ "x$PLATFORM_RELEASE" == "x" ]; then
-             rpmbuild -bs $SPEC_PATH \
-                 --define="%_topdir $FULL_BUILD_DIR" \
-                 --define='_tis_dist .tis' \
-                 --undefine=dist
-          else
-             rpmbuild -bs $SPEC_PATH \
-                 --define="%_topdir $FULL_BUILD_DIR" \
-                 --define='_tis_dist .tis' \
-                 --define="platform_release $PLATFORM_RELEASE" \
-                 --undefine=dist
-          fi
-
-          if [ $? -ne 0 ]; then
-              echo "ERROR: $FUNCNAME (${LINENO}): rpmbuild failed: rpmbuild -bs $SPEC_PATH --define='%_topdir $FULL_BUILD_DIR' --define='_tis_dist .tis' --undefine=dist"
-              return 1
-          fi
-      else
-          echo "SRPM build not needed"
-      fi
-   done
-
-   return 0
-}
-
-
-srpm_extract () {
-   local ORIG_SRPM_PATH=$1
-   local WRS_PKG_DIR=$2
-   local ROOT_DIR=$3
-   local BUILD_DIR=$4
-   local BRANCH=$5
-
-   local USE_GIT=0
-   local ORIG_DIR=$(pwd)
-   local PKG_DIR=$(rpm -q --queryformat '%{NAME}-%{VERSION}-%{RELEASE}\n' --nosignature -p $ORIG_SRPM_PATH)
-
-   if [ "x$ROOT_DIR" == "x" ]; then
-      ROOT_DIR="$MY_WORKSPACE/srpm_assemble"
-   fi
-
-   if [ "x$BUILD_DIR" == "x" ]; then
-      BUILD_DIR="$PKG_DIR/rpmbuild"
-   fi
-
-   local SPEC_DIR="$ROOT_DIR/$BUILD_DIR/SPECS"
-   local SOURCE_DIR="$ROOT_DIR/$BUILD_DIR/SOURCES"
-   local GIT_DIR="$ROOT_DIR/$PKG_DIR/gits"
-   local META_PATCH_TARGET_DIR="$ROOT_DIR/$BUILD_DIR"
-   local ARCH=centos
-
-   if [ ! -d $ROOT_DIR ]; then
-      mkdir -p "$ROOT_DIR"
-   fi
-
-   if [ ! -d $SPEC_DIR ]; then
-      rpm -i --nosignature --root=$ROOT_DIR --define="%_topdir $BUILD_DIR" $ORIG_SRPM_PATH 2>> /dev/null
-      if [ $? -ne 0 ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): Failed to extract '$ORIG_SRPM_PATH' to '$ROOT_DIR/$BUILD_DIR'"
-          return 1
-      fi
-   fi
-
-   for SPEC in $(cd $SPEC_DIR; ls -1 *.spec); do
-      echo $SPEC;
-      SPEC_GIT="$GIT_DIR/$SPEC"
-      PKG_NAME=$(srpm_spec_find_name $SPEC_DIR/$SPEC 2>> /dev/null)
-      PKG_VER=$(srpm_spec_find_version $SPEC_DIR/$SPEC 2>> /dev/null)
-      TAR_DIR="$PKG_NAME-$PKG_VER"
-      PATCH_TARGET_DIR="$SPEC_GIT/$TAR_DIR"
-      echo "   $TAR_DIR"
-
-      if [ "x$WRS_PKG_DIR" != "x" ]; then
-         echo "srpm_apply_meta_patches '$META_PATCH_TARGET_DIR' '$WRS_PKG_DIR' $USE_GIT '$ARCH' '$BRANCH'"
-         srpm_apply_meta_patches "$META_PATCH_TARGET_DIR" "$WRS_PKG_DIR" $USE_GIT "$ARCH" "$BRANCH"
-         if [ $? -ne 0 ]; then
-            cd $ORIG_DIR
-            return 1
-         fi
-      fi
-   done
-
-   cd $ORIG_DIR
-   return 0
-}
-
-
-srpm_apply_meta_patches () {
-   local META_PATCH_TARGET_DIR=$1
-   local WRS_PKG_DIR=$2
-   local USE_GIT=$3
-   local ARCH=$4
-   local BRANCH=$5
-
-   local ORIG_DIR=$(pwd)
-   local META_PATCH_DIR
-   local PATCH_DIR
-   local PATCH
-   local PATCH_PATH
-   local PO_PATH
-
-   echo "Applying metadata patches"
-   if [ ! -d "$META_PATCH_TARGET_DIR" ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): directory '$META_PATCH_TARGET_DIR' not found."
-      return 1
-   fi
-
-   if [ ! -d "$WRS_PKG_DIR" ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): directory '$WRS_PKG_DIR' not found."
-      return 1
-   fi
-
-   META_PATCH_DIR="$WRS_PKG_DIR/$ARCH/meta_patches"
-   PATCH_DIR="$WRS_PKG_DIR/$ARCH/patches"
-   PO_PATH="$META_PATCH_DIR/PATCH_ORDER"
-   if [ ! -f $PO_PATH ]; then
-      echo "No WRS patches to apply"
-      return 0
-   fi
-
-   cd $META_PATCH_TARGET_DIR
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): failed to change directory to '$META_PATCH_TARGET_DIR'"
-      return 1
-   fi
-
-   for PATCH in $(cat $PO_PATH); do
-      PATCH_PATH="$META_PATCH_DIR/$PATCH"
-      if [ ! -f "$PATCH_PATH" ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): patch '$PATCH_PATH' not found."
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      echo "srpm_apply_patch '$PATCH_PATH' '-p1' '$META_PATCH_TARGET_DIR' $USE_GIT 'WRS: ' '$METHOD_NO_RPMBUILD' '' '' '' '' 0 '$BRANCH' ''"
-      srpm_apply_patch "$PATCH_PATH" "-p1" "$META_PATCH_TARGET_DIR" $USE_GIT "WRS: " $METHOD_NO_RPMBUILD "" "" "" "" 0 "$BRANCH" "" 0
-
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): failed to apply patch '$PATCH'"
-         cd $ORIG_DIR
-         return 1
-      fi
-   done
-
-   local d
-   local dd
-   local f
-   local ff
-
-   if [ -d "$PATCH_DIR" ]; then
-      echo ".gitignore" >> "$META_PATCH_TARGET_DIR/.gitignore"
-      cd $PATCH_DIR
-
-      if [ $? -ne 0 ]; then
-         echo "ERROR: Failed to cd to '$PATCH_DIR'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      for dd in $(find . -type d | sort -V); do
-         d=${dd:2}
-         mkdir -p "$META_PATCH_TARGET_DIR/SOURCES/$d"
-         if [ $? -ne 0 ]; then
-            echo "ERROR: Failed to mkdir '$META_PATCH_TARGET_DIR/SOURCES/$d'"
-            cd $ORIG_DIR
-            return 1
-         fi
-      done
-
-      for ff in $(find . -type f | sort -V); do
-         f=${ff:2}
-         d=$(dirname $f)
-         \cp -L -f -v "$PATCH_DIR/$f" "$META_PATCH_TARGET_DIR/SOURCES/$d"
-         if [ $? -ne 0 ]; then
-            echo "ERROR: Failed to copy '$PATCH_DIR/$f' to '$META_PATCH_TARGET_DIR/SOURCES/$d'"
-            cd $ORIG_DIR
-            return 1
-         fi
-         echo "SOURCES/$f" >> "$META_PATCH_TARGET_DIR/.gitignore"
-      done
-   fi
-
-   cd $ORIG_DIR
-   return 0
-}
-
-export GLOBAL_PATCH_TARGET_DIR=""
-
-
-commit_git () {
-   local DIR="$1"
-   local COMMIT_MESSAGE="$2"
-   local TAG="$3"
-
-   local ORIG_DIR=$(pwd)
-
-   # Add and Commit
-   cd $DIR
-   echo "git add .  @  $(pwd)"
-   git add .
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): 'git add' failed for at '$DIR'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   echo "git commit --allow-empty -m '$COMMIT_MESSAGE'  @  $(pwd)"
-   git commit --allow-empty -m "$COMMIT_MESSAGE"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): 'git commit' failed at '$DIR'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   # Tag the contents
-   if [ "$TAG" != "" ]; then
-      echo "git tag $TAG  @  $(pwd)"
-      git tag $TAG
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): 'git tag' failed at '$DIR'"
-         cd $ORIG_DIR
-         return 1
-      fi
-   fi
-
-   cd $ORIG_DIR >> /dev/null
-   return 0
-}
-
-init_git_if_required () {
-   local DIR="$1"
-   local COMMIT_MESSAGE="$2"
-   local TAG="$3"
-
-   local ORIG_DIR=$(pwd)
-
-   cd $DIR
-
-   # Initialize git if this is our first time
-   if [ ! -d .git ]; then
-      echo "$(pwd)/.git  not found, creating a new git"
-      echo "git init  @  $(pwd)"
-      git init
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): 'git init' failed for at '$BUILD_DIR'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      echo "git add .  @  $(pwd)"
-      git add .
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): 'git add' failed for at '$DIR'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      echo "git commit --allow-empty -m '$COMMIT_MESSAGE'  @  $(pwd)"
-      git commit --allow-empty -m "$COMMIT_MESSAGE"
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): 'git commit' failed at '$DIR'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      # Tag the contents
-      if [ "$TAG" != "" ]; then
-         echo "git tag $TAG  @  $(pwd)"
-         git tag $TAG
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): 'git tag' failed at '$DIR'"
-            cd $ORIG_DIR
-            return 1
-         fi
-      fi
-   fi
-
-   cd $ORIG_DIR >> /dev/null
-   return 0
-}
-
-prep_git_for_metadata () {
-   local BUILD_DIR="$1"
-   local BRANCH="$2"
-   local NO_META_PATCH="$3"
-   local PRE_WRS_PREFIX="$4"
-
-   local ORIG_BRANCH=""
-   local ORIG_PRE_WRS_TAG=""
-   local ORIG_DIR=$(pwd)
-
-   cd $BUILD_DIR
-
-   # Initialize git if this is our first time
-   init_git_if_required "." "ORIGINAL: initial commit" ""
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): init_git_if_required failed for at '$(pwd)'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   if [ "x$NO_META_PATCH" == "x1" ]; then
-      ORIG_BRANCH=$(git rev-parse --abbrev-ref HEAD)
-      ORIG_PRE_WRS_TAG="$PRE_WRS_PREFIX$ORIG_BRANCH"
-   fi
-
-   # Delete branch if it previously existed
-   git checkout $BRANCH &>> /dev/null
-   if [ $? -eq 0 ]; then
-       git checkout master
-       git branch -D $BRANCH
-       if [ $? -ne 0 ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): failed to delete branch '$BRANCH' at '$(pwd)'"
-          cd $ORIG_DIR
-          return 1
-       fi
-   fi
-
-   # create branch
-   if [ "x$ORIG_PRE_WRS_TAG" != "x" ]; then
-      git checkout $ORIG_PRE_WRS_TAG
-      if [ $? -ne 0 ]; then
-         git checkout master
-      fi
-   else
-      git checkout master
-   fi
-
-   echo "git checkout -b $BRANCH"
-   git checkout -b $BRANCH
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): failed to create branch '$BRANCH' at '$(pwd)'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   cd $ORIG_DIR >> /dev/null
-   return 0
-}
-
-   
-tarball_extract () {
-   local SPEC_DIR="${1}"
-   local SPEC="${2}"
-   local SPEC_GIT="${3}"
-   local SOURCE_DIR="${4}"
-   local BRANCH="${5}"
-   local ORIG_BRANCH="${6}"
-   local TAR_DIR="${7}"
-   local ROOT_DIR="${8}"
-   local PKG_DIR="${9}"
-   local BUILD_DIR="${10}"
-   local TARGET_ARCH="${11}"
-   local TIS_PATCH_VER="${12}"
-   local OUTPUT_FILE="${13}"
-   local NO_META_PATCH=${14}
-   local PBR_VERSION=${15}
-   # BUILD_TYPE exported from higher layers 
-
-   echo "tarball_extract  SPEC_DIR=$SPEC_DIR  SPEC=$SPEC  SPEC_GIT=$SPEC_GIT  SOURCE_DIR=$SOURCE_DIR  BRANCH=$BRANCH  ORIG_BRANCH=$ORIG_BRANCH  TAR_DIR=$TAR_DIR  ROOT_DIR=$ROOT_DIR  PKG_DIR=$PKG_DIR  BUILD_DIR=$BUILD_DIR  TARGET_ARCH=$TARGET_ARCH  TIS_PATCH_VER=$TIS_PATCH_VER  OUTPUT_FILE=$OUTPUT_FILE  NO_META_PATCH=$NO_META_PATCH PBR_VERSION=$PBR_VERSION"
-
-   if [ -f $OUTPUT_FILE ]; then
-      \rm -f $OUTPUT_FILE
-   fi
-
-   local ALT_TAR_DIR=""
-   local SOURCE_NO=""
-   local SOURCE_NAME=""
-   local TAR=""
-   local TAR_HAS_CHANGED=1
-   local REAL_TYPE=""
-   local ORIG_DIR="$(pwd)"
-   local TAR_EXTRACT_ARG=""
-   local PATCH_TARGET_DIR=""
-   local EXTRACT_TO_DIR=""
-   local AUTOSETUP_MACRO=""
-   local AUTOSETUP=0
-   local METHOD=$METHOD_RPMBUILD_SCRIPT
-   local RPMBUILD_BP_LOG=$ROOT_DIR/$PKG_DIR/rpmbuild_bp.log
-   local RPMBUILD_BUILD_DIR=$ROOT_DIR/$BUILD_DIR/BUILD
-   local EXCLUDE_PATCH_NUM_CSV=""
-   local RAW_SCRIPT=""
-   local EXTRACT_SCRIPT=""
-
-
-   # Create a directory for the extraction of tarballs
-   echo "SPEC_GIT=$SPEC_GIT"
-   echo "mkdir -p  $SPEC_GIT"
-   mkdir -p  $SPEC_GIT
-   echo "cd $SPEC_GIT"
-   cd $SPEC_GIT
-   pwd
-
-   # Extract tarballs named in spec file
-
-   # Does this spec file use autosetup
-   AUTOSETUP_MACRO=$(grep '%autosetup' $SPEC_DIR/$SPEC)
-   if [ $? -eq 0 ]; then
-      AUTOSETUP=1
-   fi
-
-   if [ $METHOD -eq $METHOD_RPMBUILD_SCRIPT ]; then
-       if [ -d "$RPMBUILD_BUILD_DIR" ]; then
-           echo "rm -rf RPMBUILD_BUILD_DIR=$RPMBUILD_BUILD_DIR"
-           \rm -rf "$RPMBUILD_BUILD_DIR"
-       fi
-       mkdir -p $RPMBUILD_BUILD_DIR
-
-       if [ -f $RPMBUILD_BP_LOG ]; then
-          echo "rm -f RPMBUILD_BP_LOG=$RPMBUILD_BP_LOG"
-          \rm -f $RPMBUILD_BP_LOG
-       fi
-       touch $RPMBUILD_BP_LOG
-
-       RAW_SCRIPT=$ROOT_DIR/$PKG_DIR/raw_script
-       EXTRACT_SCRIPT=$ROOT_DIR/$PKG_DIR/extract_script
-       echo "srpm_create_raw_extract_script '$SPEC_DIR/$SPEC' '$ROOT_DIR/$PKG_DIR'  '$ROOT_DIR/$BUILD_DIR'  '$TARGET_ARCH' '$TIS_PATCH_VER' '$RAW_SCRIPT' '$TAR_DIR' '$PBR_VERSION'"
-       srpm_create_raw_extract_script "$SPEC_DIR/$SPEC" "$ROOT_DIR/$PKG_DIR" "$ROOT_DIR/$BUILD_DIR"  "$TARGET_ARCH" "$TIS_PATCH_VER" "$RAW_SCRIPT" "$TAR_DIR" "$PBR_VERSION"
-       if [ $? -ne 0 ]; then
-           echo "ERROR: $FUNCNAME (${LINENO}): srpm_create_raw_extract_script failed"
-           cd $ORIG_DIR
-           return 1
-       fi
-
-       if [ -d "$RPMBUILD_BUILD_DIR" ]; then
-           echo "rm -rf RPMBUILD_BUILD_DIR=$RPMBUILD_BUILD_DIR"
-           \rm -rf "$RPMBUILD_BUILD_DIR"
-       fi
-       mkdir -p $RPMBUILD_BUILD_DIR
-
-       echo "raw_create_tarballs_extract_script '$RAW_SCRIPT'  '$EXTRACT_SCRIPT'  '$RPMBUILD_BUILD_DIR'  '$SPEC_GIT'"
-       EXTRACT_TO_DIR=$(raw_create_tarballs_extract_script "$RAW_SCRIPT"  "$EXTRACT_SCRIPT"  "$RPMBUILD_BUILD_DIR"  "$SPEC_GIT")
-       if [ $? -ne 0 ]; then
-           echo "ERROR: $FUNCNAME (${LINENO}): srpm_create_raw_extract_script failed"
-           cd $ORIG_DIR
-           return 1
-       fi
-       echo "EXTRACT_TO_DIR=$EXTRACT_TO_DIR"
-   fi
-
-   local EXTRACT_DIR_FILE=""
-   local EXTRACT_TARGET_DIR=""
-   local OLD_EXTRACT_TARGET_DIR=""
-   local SAVE_OLD_EXTRACT_TARGET_DIR=""
-   local PATCH_DIR_FILE=""
-   local PATCH_TARGET_DIR=""
-   local OLD_PATCH_TARGET_DIR=""
-   local SAVE_OLD_PATCH_TARGET_DIR=""
-
-   if [ $METHOD -eq $METHOD_RPMBUILD_SCRIPT ]; then
-      EXTRACT_DIR_FILE=$(dirname $EXTRACT_SCRIPT)/extract_dir
-      PATCH_DIR_FILE=$(dirname $EXTRACT_SCRIPT)/patch_dir
-      if [ -f $EXTRACT_DIR_FILE ]; then
-         OLD_EXTRACT_TARGET_DIR=$(cat $EXTRACT_DIR_FILE)
-      fi
-      if [ -f $PATCH_DIR_FILE ]; then
-         OLD_PATCH_TARGET_DIR=$(cat $PATCH_DIR_FILE)
-      fi
-      if [ "$OLD_PATCH_TARGET_DIR" != "" ] && [ -d $OLD_PATCH_TARGET_DIR ]; then
-         SAVE_OLD_PATCH_TARGET_DIR="${OLD_PATCH_TARGET_DIR}.save"
-         echo "mv $OLD_PATCH_TARGET_DIR $SAVE_OLD_PATCH_TARGET_DIR"
-         mv $OLD_PATCH_TARGET_DIR $SAVE_OLD_PATCH_TARGET_DIR
-      fi
-      if [ "$OLD_EXTRACT_TARGET_DIR" != "" ] && [ -d $OLD_EXTRACT_TARGET_DIR ]; then
-         SAVE_OLD_EXTRACT_TARGET_DIR="${OLD_EXTRACT_TARGET_DIR}.save"
-         echo "mv $OLD_EXTRACT_TARGET_DIR $SAVE_OLD_EXTRACT_TARGET_DIR"
-         mv $OLD_EXTRACT_TARGET_DIR $SAVE_OLD_EXTRACT_TARGET_DIR
-      fi
-      if [ ! -d $SPEC_GIT ]; then
-         mkdir -p $SPEC_GIT
-      fi
-      (
-       source $EXTRACT_SCRIPT
-       RC=$?
-       echo "SRPM_EXTRACT_DIR=$(pwd)"
-       exit $RC
-      ) | tee $EXTRACT_SCRIPT.pre.log
-      if [ ${PIPESTATUS[0]} -ne 0 ] ; then
-         echo "ERROR: Failed in script '$EXTRACT_SCRIPT'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      CANONICAL_SPEC_GIT=$(readlink -f "$SPEC_GIT")
-      EXTRACT_TARGET_DIR=$(cat $EXTRACT_DIR_FILE)
-      PATCH_TARGET_DIR=$(cat $PATCH_DIR_FILE)
-      TAR_DIR=$(echo "$PATCH_TARGET_DIR" | sed "s#^$CANONICAL_SPEC_GIT/##" | sed "s#^$CANONICAL_SPEC_GIT##")
-      if [ "$TAR_DIR" == "" ]; then
-         TAR_DIR="."
-      fi
-      echo "=== CANONICAL_SPEC_GIT=$CANONICAL_SPEC_GIT"
-      echo "=== TAR_DIR=$TAR_DIR"
-      echo "=== PATCH_TARGET_DIR=$PATCH_TARGET_DIR"
-      echo "=== EXTRACT_TARGET_DIR=$EXTRACT_TARGET_DIR"
-      if [ "$PATCH_TARGET_DIR" == "$TAR_DIR" ] || [ "$PATCH_TARGET_DIR" == "" ] || [ "$EXTRACT_TARGET_DIR" == "" ] || [[ "$TAR_DIR" == /* ]]; then
-         echo "Something went wrong"
-         cd $ORIG_DIR
-         return 1
-      fi
-      
-      echo "rm -rf $PATCH_TARGET_DIR; mkdir -p  $PATCH_TARGET_DIR"
-      \rm -rf "$PATCH_TARGET_DIR"
-      mkdir -p "$PATCH_TARGET_DIR"
-
-      if [ "$SAVE_OLD_EXTRACT_TARGET_DIR" != "" ] && [ -d $SAVE_OLD_EXTRACT_TARGET_DIR ]; then
-         echo "mv $SAVE_OLD_EXTRACT_TARGET_DIR $OLD_EXTRACT_TARGET_DIR"
-         if [ -d $OLD_EXTRACT_TARGET_DIR ]; then
-            \rm -rf $OLD_EXTRACT_TARGET_DIR
-         fi
-         mv $SAVE_OLD_EXTRACT_TARGET_DIR $OLD_EXTRACT_TARGET_DIR
-      fi
-      if [ "$SAVE_OLD_PATCH_TARGET_DIR" != "" ] && [ -d $SAVE_OLD_PATCH_TARGET_DIR ]; then
-         echo "mv $SAVE_OLD_PATCH_TARGET_DIR $OLD_PATCH_TARGET_DIR"
-         if [ -d $OLD_PATCH_TARGET_DIR ]; then
-            \rm -rf $OLD_EXTRACT_TARGET_DIR
-         fi
-         mv $SAVE_OLD_PATCH_TARGET_DIR $OLD_PATCH_TARGET_DIR
-      fi
-   else
-      # Figure out where tarball will extract to... 
-      # afterwards  ALT_TAR_DIR = common path element found in all files in the tarball
-      for SOURCE_NO in $(grep -i '^Source[0-9]*:' $SPEC_DIR/$SPEC | awk -F : '{print $1}' | sort  --unique --version-sort); do
-         echo "   $SOURCE_NO"
-         SOURCE_NAME=$(spec_find_tag $SOURCE_NO $SPEC_DIR/$SPEC 2>> /dev/null | awk -F / '{print $NF}')
-         if [ "x$SOURCE_NAME" != "x" ]; then
-            echo "      $SOURCE_NAME"
-            TAR="$SOURCE_DIR/$SOURCE_NAME"
-            echo "         TAR=$TAR"
-            # Where will the tarball install to ... put it in ALT_TAR_DIR
-            if [ -f $TAR ]; then
-               if [ "$ALT_TAR_DIR" == "" ]; then
-                  if [ "x$ORIG_BRANCH" == "x" ]; then
-                     TAR_HAS_CHANGED=1
-                  else
-                     cd $SOURCE_DIR
-                     TAR_HAS_CHANGED=$(git diff $BRANCH $ORIG_BRANCH --name-only -- $SOURCE_NAME | wc -l)
-                     cd - >> /dev/null
-                  fi
-      
-                  echo "         TAR_HAS_CHANGED=$TAR_HAS_CHANGED"
-      
-                  case $SOURCE_NAME in
-                     *.tar.gz)  REAL_TYPE=$(file $TAR | awk -F : '{ print $2 }')   
-                                # For whatever reason, centos-release-7-2.1511.tar.gz is actually
-                                # an uncompressed tarball, regardless of the name
-                                if [ "$REAL_TYPE" == " POSIX tar archive (GNU)" ];  then
-                                   ALT_TAR_DIR=$(tar_cmd_common_dir "tar -tvf $TAR")
-                                else
-                                   ALT_TAR_DIR=$(tar_cmd_common_dir "tar -tzvf $TAR")
-                                fi
-                                ;;
-                     *.tgz)     ALT_TAR_DIR=$(tar_cmd_common_dir "tar -tzvf $TAR") ;;
-                     *.tar.bz2) ALT_TAR_DIR=$(tar_cmd_common_dir "tar -tjvf $TAR") ;;
-                     *.tar.xz)  ALT_TAR_DIR=$(tar_cmd_common_dir "tar -tJvf $TAR") ;;
-                     *.tar)     ALT_TAR_DIR=$(tar_cmd_common_dir "tar -tvf $TAR") ;;
-                     *)         echo "skipping '$SOURCE_NAME'";;
-                  esac
-                  echo "         ALT_TAR_DIR=$ALT_TAR_DIR"
-               fi
-            else
-               echo "ERROR: $FUNCNAME (${LINENO}): '$SOURCE_NAME' not found in '$SOURCE_DIR'"
-               cd $ORIG_DIR
-               return 1
-            fi
-         else
-            echo "WARNING: nothing found by 'spec_find_tag $SOURCE_NO $SPEC_DIR/$SPEC'"
-         fi
-      done
-
-      echo "TAR_DIR=$TAR_DIR"
-      echo "ALT_TAR_DIR=$ALT_TAR_DIR"
-
-      if [ "$ALT_TAR_DIR" == "." ]; then
-         TAR_EXTRACT_ARG=" -C $TAR_DIR"
-      elif [ "$ALT_TAR_DIR" != "." ] && [ "$ALT_TAR_DIR" != "" ]; then
-         if [ $AUTOSETUP -eq 0 ]; then
-            TAR_DIR="$ALT_TAR_DIR"
-         else
-            TAR_DIR="$TAR_DIR/$ALT_TAR_DIR"
-         fi
-      fi
-
-      PATCH_TARGET_DIR="$SPEC_GIT/$TAR_DIR"
-   fi
-
-   export GLOBAL_PATCH_TARGET_DIR="$PATCH_TARGET_DIR"
-   echo "TAR_DIR=$TAR_DIR"
-   echo "PATCH_TARGET_DIR=$PATCH_TARGET_DIR"
-
-   if [ -z "$TAR_DIR" ]; then
-       echo "No tarball found."
-       return 1
-   fi
-
-   if [ "x$NO_META_PATCH" == "x1" ] && [ -d "$TAR_DIR" ] && [ $(ls -1 "$TAR_DIR" | wc -l) -gt 0 ]; then
-      echo "Tarball already extracted, and we are processing an upgrade. Skipping tarball extract"
-      echo "PATCH_TARGET_DIR=$PATCH_TARGET_DIR" > $OUTPUT_FILE
-      echo "EXCLUDE_PATCH_NUM_CSV=$EXCLUDE_PATCH_NUM_CSV" >> $OUTPUT_FILE
-      echo "METHOD=$METHOD" >> $OUTPUT_FILE
-      echo "RAW_SCRIPT=$RAW_SCRIPT" >> $OUTPUT_FILE
-      echo "RPMBUILD_BUILD_DIR=$RPMBUILD_BUILD_DIR" >> $OUTPUT_FILE
-      return 0
-   fi
-
-   if [ ! -d "$TAR_DIR" ]; then
-      mkdir -p $TAR_DIR
-   fi
-
-   if [ -d "$TAR_DIR" ]; then
-      cd $TAR_DIR
-
-      (init_git_if_required "." "ORIGINAL: initial commit" "")
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): init_git_if_required failed at '$(pwd)' while extracting '$SPEC_PATH'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      echo "git created at '$(pwd)'"
-      cd - >> /dev/null
-   fi
-
-   local NEED_TAR_EXTRACT=1
-
-   # Set up Branch
-   if [ -d "$TAR_DIR" ]; then
-      echo "cd '$TAR_DIR'"
-      cd $TAR_DIR
-      pwd
-
-      # Delete old branch if it exists
-      echo "git checkout $BRANCH  @  $(pwd)"
-      git checkout $BRANCH &>> /dev/null
-      if [ $? -eq 0 ]; then
-          echo "git checkout master  @  $(pwd)"
-          git checkout master
-          echo "git branch -D $BRANCH  @  $(pwd)"
-          git branch -D $BRANCH
-          if [ $? -ne 0 ]; then
-             echo "ERROR: $FUNCNAME (${LINENO}): failed to delete branch '$BRANCH'"
-             cd $ORIG_DIR
-             return 1
-          fi
-      fi
-
-      # Determine origin of our branch
-      if [ $TAR_HAS_CHANGED -gt 0 ]; then
-         echo "git checkout master  @  $(pwd)"
-         git checkout master
-      else
-         echo "git checkout $ORIG_PRE_WRS_TAG  @  $(pwd)"
-         git checkout $ORIG_PRE_WRS_TAG
-         if [ $? -eq 0 ]; then
-            NEED_TAR_EXTRACT=0
-         else
-            git checkout master
-         fi
-      fi
-
-      cd - >> /dev/null
-   fi
-
-   # Extract tarball(s) if needed
-   echo "NEED_TAR_EXTRACT=$NEED_TAR_EXTRACT"
-   if [ $NEED_TAR_EXTRACT -eq 1 ]; then
-
-       # Create branch
-       echo "cd $TAR_DIR; git checkout -b $BRANCH"
-       cd $TAR_DIR
-       git checkout -b $BRANCH
-       if [ $? -ne 0 ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): failed to create branch '$BRANCH'"
-          cd $ORIG_DIR
-          return 1
-       fi
-       cd - >> /dev/null
-
-       #########################################################################
-       if [ $METHOD -eq $METHOD_NO_RPMBUILD ]; then
-           # Don't use rpmbuild to extrace tarball, instead try to do it for ourselves
-           for SOURCE_NO in $(grep -i '^Source[0-9]*:' $SPEC_DIR/$SPEC | awk -F : '{print $1}'); do
-              echo "   $SOURCE_NO"
-              local NO=$(echo $SOURCE_NO | sed 's/Source//')
-              SOURCE_NAME=$(spec_find_tag $SOURCE_NO $SPEC_DIR/$SPEC 2>> /dev/null | awk -F / '{print $NF}')
-              echo "      $SOURCE_NAME"
-              TAR="$SOURCE_DIR/$SOURCE_NAME"
-              echo "         $TAR"
-              if [ -f $TAR ]; then
-                 if [ $NEED_TAR_EXTRACT -eq 1 ]; then
-                    echo "spec_untar_path '$NO' '$SPEC_DIR/$SPEC'"
-                    local UNTAR_PATH=$(spec_untar_path "$NO" "$SPEC_DIR/$SPEC")
-                    echo "UNTAR_PATH=$UNTAR_PATH"
-                    mkdir -p $UNTAR_PATH
-                    if [ $? -ne 0 ]; then
-                        echo "ERROR: $FUNCNAME (${LINENO}): command failed: mkdir -p $UNTAR_PATH"
-                        cd $ORIG_DIR
-                        return 1
-                    fi
-                    (
-                    cd $UNTAR_PATH
-                    case $SOURCE_NAME in
-                       *.tar.gz)  REAL_TYPE=$(file $TAR | awk -F : '{ print $2 }')
-                          # For whatever reason, centos-release-7-2.1511.tar.gz is actually
-                          # an uncompressed tarball, regardless of the name
-                          if [ "$REAL_TYPE" == " POSIX tar archive (GNU)" ];  then
-                             tar_cmd_common_dir "tar -xvf $TAR $TAR_EXTRACT_ARG"
-                          else
-                             tar_cmd_common_dir "tar -xzvf $TAR $TAR_EXTRACT_ARG"
-                          fi
-                          ;;
-                       *.tgz)     tar -xzvf $TAR $TAR_EXTRACT_ARG ;;
-                       *.tar.bz2) tar -xjvf $TAR $TAR_EXTRACT_ARG ;;
-                       *.tar.xz)  tar -xJvf $TAR $TAR_EXTRACT_ARG ;;
-                       *.tar)     tar -xvf $TAR $TAR_EXTRACT_ARG ;;
-                       *) echo "skipping '$SOURCE_NAME'";;
-                    esac
-                    exit $?
-                    )
-                    if [ $? -ne 0 ]; then
-                        echo "ERROR: $FUNCNAME (${LINENO}): tar failed to extract '$TAR'"
-                        cd $ORIG_DIR
-                        return 1
-                    fi
-                 fi
-              else
-                 echo "ERROR: $FUNCNAME (${LINENO}): '$SOURCE_NAME' not found in '$SOURCE_DIR'"
-                 cd $ORIG_DIR
-                 return 1
-              fi
-           done
-       fi
-
-       #########################################################################
-       if [ $METHOD -eq $METHOD_RPMBUILD_UNPATCH ]; then
-           if [ -d "$RPMBUILD_BUILD_DIR" ]; then
-               \rm -rf "$RPMBUILD_BUILD_DIR"
-           fi
-           mkdir -p $RPMBUILD_BUILD_DIR
-
-           # The following rpmbuild will extract all tarballs, run any other prep script, and apply all patches
-
-           local NEED_PATCH_ROLLBACK=0
-           local LAST_PATCH=$(grep '^%patch[0-9]' $SPEC_DIR/$SPEC | tail -n 1 | awk '{ print $1 }')
-           if [ "x$LAST_PATCH" == "x" ]; then
-               cat $SPEC_DIR/$SPEC | grep -v '^git ' > $SPEC_DIR/_$SPEC
-           else
-               cat $SPEC_DIR/$SPEC | grep -v '^git ' | grep -v '^%build' | sed "/$LAST_PATCH/a %build" > $SPEC_DIR/_$SPEC
-               NEED_PATCH_ROLLBACK=1
-           fi
-
-           if [ -f $RPMBUILD_BP_LOG ]; then
-              \rm -f $RPMBUILD_BP_LOG
-           fi
-           touch $RPMBUILD_BP_LOG
-           # Note stdout and stderr go to same file,  must not use 2>&1 syntax as it doesn't guarantee order
-           # Build the srpm as though for std build, for naming consistency
-           echo "rpmbuild -bp $SPEC_DIR/_$SPEC --root $ROOT_DIR/$PKG_DIR --define='%_topdir $ROOT_DIR/$BUILD_DIR' --define='_tis_dist .tis' --nodeps --target $TARGET_ARCH >> $RPMBUILD_BP_LOG 2>> $RPMBUILD_BP_LOG"
-           rpmbuild -bp $SPEC_DIR/_$SPEC --root $ROOT_DIR/$PKG_DIR \
-               --define="%_topdir $ROOT_DIR/$BUILD_DIR" \
-               --define='_tis_dist .tis' \
-               --define="_tis_build_type $BUILD_TYPE" \
-               --nodeps --target $TARGET_ARCH >> $RPMBUILD_BP_LOG 2>> $RPMBUILD_BP_LOG
-           if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): command failed: rpmbuild -bp $SPEC_DIR/$SPEC --root $ROOT_DIR/$PKG_DIR --define='%_topdir $ROOT_DIR/$BUILD_DIR' --define='_tis_dist .tis' --nodeps --target $TARGET_ARCH > $RPMBUILD_BP_LOG"
-               cd $ORIG_DIR
-               return 1
-           fi
-
-           \rm -f $SPEC_DIR/_$SPEC
-
-           if [ $NEED_PATCH_ROLLBACK -eq 1 ]; then
-              # But we don't want patches yet, so roll them back.
-              # Use the log from rpmbuild to learn what patches to roll back, in what order, and with what arguements
-              for n in $(grep '^[Pp]atch #' $RPMBUILD_BP_LOG | tac | awk '{ print $2 }' | sed 's/#//'); do
-                 cmd1=$(cat $RPMBUILD_BP_LOG | sed -n "/^[Pp]atch #$n /,/^patching/p" | grep '^+' | sed 's/^+ //' | grep '[/]cat')
-                 cmd2=$(cat $RPMBUILD_BP_LOG | sed -n "/^[Pp]atch #$n /,/^patching/p" | grep '^+' | sed 's/^+ //' | grep '[/]patch')
-                 cmd="$cmd1 | $cmd2 -R"
-                 (
-                    echo "Remove patch #$n"
-                    cd $RPMBUILD_BUILD_DIR/$TAR_DIR
-                    echo "$cmd"
-                    eval $cmd
-                    if [ ${PIPESTATUS[0]} -ne 0 ] ; then
-                       echo "ERROR: $FUNCNAME (${LINENO}): failed command: $cmd"
-                       return 1
-                    fi
-                 )
-                 if [ $? -ne 0 ]; then
-                    return 1
-                 fi
-              done
-           fi
-
-           echo "find $RPMBUILD_BUILD_DIR/$TAR_DIR/ -mindepth 1 -maxdepth 1 -exec mv -t $SPEC_GIT/$TAR_DIR/ -- {} +"
-           find $RPMBUILD_BUILD_DIR/$TAR_DIR/ -mindepth 1 -maxdepth 1 -exec mv -t $SPEC_GIT/$TAR_DIR/ -- {} +
-
-           \rm -rf "$RPMBUILD_BUILD_DIR"
-
-           grep '^%patch[0-9]* ' $SPEC_DIR/$SPEC > /dev/null
-           if [ $? -eq 0 ];then
-              echo "Using '%patch' method"
-
-             local PATCH_NO=""
-              # for PATCH_NO in $(grep '^%patch[0-9]* ' $SPEC_DIR/$SPEC | awk  '{print $1}' | sed 's/^%patch//') ; do
-              for PATCH_NO in $(grep -i '^[Pp]atch[0-9]*:' "$SPEC_DIR/$SPEC" | awk -F : '{print $1}' | sed 's/^[Pp]atch//' | sort  --unique --version-sort); do
-                 grep "^[Pp]atch #$PATCH_NO " $RPMBUILD_BP_LOG
-                 if [ $? -ne 0 ]; then
-                    if [ "x$EXCLUDE_PATCH_NUM_CSV" == "x" ]; then
-                       EXCLUDE_PATCH_NUM_CSV="$PATCH_NO"
-                    else
-                       EXCLUDE_PATCH_NUM_CSV="$EXCLUDE_PATCH_NUM_CSV,$PATCH_NO"
-                    fi
-                 fi
-              done
-           else
-              grep '^git am' $SPEC_DIR/$SPEC > /dev/null
-              if [ $? -eq 0 ];then
-                 echo "Using 'git am' method, EXCLUDE_PATCH_NUM_CSV=''"
-              else
-                 echo "Warning: no known patch apply command, EXCLUDE_PATCH_NUM_CSV=''"
-              fi
-           fi
-       fi
-
-       #########################################################################
-       if [ $METHOD -eq $METHOD_RPMBUILD_SCRIPT ]; then
-           (
-            # SAL
-            source $EXTRACT_SCRIPT
-            RC=$?
-            echo "SRPM_EXTRACT_DIR=$(pwd)"
-            exit $RC
-           ) | tee $EXTRACT_SCRIPT.log
-           if [ ${PIPESTATUS[0]} -ne 0 ] ; then
-              echo "ERROR: Failed in script '$EXTRACT_SCRIPT'"
-              cd $ORIG_DIR
-              return 1
-           fi
-
-           local TMP_PATCH_TARGET_DIR=$(cat $PATCH_DIR_FILE)
-           if [ "x$TMP_PATCH_TARGET_DIR" != "x" ]; then
-              export GLOBAL_PATCH_TARGET_DIR=$TMP_PATCH_TARGET_DIR
-              echo "EXTRACT_TO_DIR=$EXTRACT_TO_DIR"
-              echo "GLOBAL_PATCH_TARGET_DIR=$GLOBAL_PATCH_TARGET_DIR"
-              EXTRACT_TO_DIR="$GLOBAL_PATCH_TARGET_DIR"
-           fi
-
-           if [ -z "$EXTRACT_TO_DIR" ]; then
-               echo "Failed to get EXTRACT_TO_DIR from raw_create_tarballs_extract_script"
-               cd $ORIG_DIR
-               return 1
-           fi
-
-           if [ "$EXTRACT_TO_DIR" != "$PATCH_TARGET_DIR" ]; then
-               echo "Change PATCH_TARGET_DIR from '$PATCH_TARGET_DIR' to '$EXTRACT_TO_DIR'"
-               PATCH_TARGET_DIR="$EXTRACT_TO_DIR"
-               export GLOBAL_PATCH_TARGET_DIR="$PATCH_TARGET_DIR"
-           fi
-
-           echo "rm -rf $RPMBUILD_BUILD_DIR"
-           \rm -rf "$RPMBUILD_BUILD_DIR"
-
-
-       fi
-   fi
-
-   echo "aaa TAR_DIR=$TAR_DIR"
-   if [ ! -d "$TAR_DIR" ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Failed to create expected TAR_DIR='$TAR_DIR' from $(pwd)"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   # track extracted tarball in git
-   cd "$TAR_DIR"
-   echo "NEED_TAR_EXTRACT=$NEED_TAR_EXTRACT"
-   echo "cd PATCH_TARGET_DIR=$PATCH_TARGET_DIR"
-   cd "$PATCH_TARGET_DIR"
-
-   if [ $NEED_TAR_EXTRACT -eq 1 ]; then
-      commit_git "." "ORIGINAL: extracted archive" ""
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): commit_git failed (post tarball extracted) while extracting '$TAR'"
-         cd $ORIG_DIR
-         return 1
-      fi
-   fi
-
-   echo "PATCH_TARGET_DIR=$PATCH_TARGET_DIR" > $OUTPUT_FILE
-   echo "EXCLUDE_PATCH_NUM_CSV=$EXCLUDE_PATCH_NUM_CSV" >> $OUTPUT_FILE
-   echo "METHOD=$METHOD" >> $OUTPUT_FILE
-   echo "RAW_SCRIPT=$RAW_SCRIPT" >> $OUTPUT_FILE
-   echo "RPMBUILD_BUILD_DIR=$RPMBUILD_BUILD_DIR" >> $OUTPUT_FILE
-   return 0
-}
-
-tar_and_spec_extract_to_git () {
-   local SPEC_PATH=$1
-   local WRS_PKG_DIR=$2
-   local ROOT_DIR=$3
-   local BUILD_DIR=$4
-   local BRANCH=$5
-   local NO_META_PATCH=$6
-   local TIS_PATCH_VER=$7
-   local PBR_VERSION=$8
-   local USE_GIT=1
-   local TARGET_ARCH=x86_64
-
-   if [ ! -f $SPEC_PATH ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): spec not found '$SPEC_PATH'"
-      return 1
-   fi
-
-   local ORIG_DIR=$(pwd)
-
-   if [ "x$ROOT_DIR" == "x" ]; then
-      ROOT_DIR="$MY_WORKSPACE/srpm_work"
-   fi
-
-   if [ "x$BUILD_DIR" == "x" ]; then
-      BUILD_DIR="$PKG_DIR/rpmbuild"
-   fi
-
-   if [ "x$BRANCH" == "x" ]; then
-      BRANCH="work"
-   fi
-
-   local SPEC_DIR="$ROOT_DIR/$BUILD_DIR/SPECS"
-   local SOURCE_DIR="$ROOT_DIR/$BUILD_DIR/SOURCES"
-   local GIT_DIR="$ROOT_DIR/$(dirname $BUILD_DIR)/gits"
-   local PATCH_TARGET_DIR
-   local META_PATCH_TARGET_DIR="$ROOT_DIR/$BUILD_DIR"
-   local ARCH=centos
-   local ORIG_BRANCH=""
-   local PRE_WRS_PREFIX="pre_wrs_"
-   local WRS_POST_COPY_PREFIX="wrs_post_copy_list_"
-   local PRE_WRS_TAG="$PRE_WRS_PREFIX$BRANCH"
-   local WRS_POST_COPY_TAG="$WRS_POST_COPY_PREFIX$BRANCH"
-   local ORIG_PRE_WRS_TAG=""
-   local THIS_FUNC
-
-   if [ "x$WRS_PKG_DIR" != "x" ]; then
-      if [ ! -d $WRS_PKG_DIR ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): WRS_PKG_DIR not found '$WRS_PKG_DIR'"
-          return 1
-      fi
-   fi
-
-   if [ ! -d $ROOT_DIR ]; then
-      mkdir -p "$ROOT_DIR"
-   fi
-
-   if [ ! -d $ROOT_DIR/$BUILD_DIR ]; then
-      mkdir -p "$ROOT_DIR/$BUILD_DIR"
-   else
-      if [ "x$NO_META_PATCH" != "x1" ]; then
-          echo ""
-          echo "Warning: Refusing to overwrite pre-existing edit environment for '$PKG_DIR'."
-          echo "         To delete the old edit environment use:   --edit --clean <PKG>"
-          return 2
-      fi
-   fi
-
-   prep_git_for_metadata "$ROOT_DIR/$BUILD_DIR" "$BRANCH" $NO_META_PATCH "$PRE_WRS_PREFIX"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): 'prep_git_for_metadata' failed while extracting '$SPEC_PATH'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   # Copy SPEC and TAR
-   mkdir -p "$SPEC_DIR"
-   if [ $? -ne 0 ]; then
-       echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$SPEC_DIR'"
-       return 1
-   fi
-
-   mkdir -p "$SOURCE_DIR"
-   if [ $? -ne 0 ]; then
-       echo "ERROR: $FUNCNAME (${LINENO}): Failed to create directory '$SOURCE_DIR'"
-       return 1
-   fi
-
-   cp -f "$SPEC_PATH" "$SPEC_DIR"
-   if [ $? -ne 0 ]; then
-       echo "ERROR: $FUNCNAME (${LINENO}): Failed to copy '$SPEC_PATH' to '$SPEC_DIR'"
-       return 1
-   fi
-
-   # Add and Commit
-   commit_git "$ROOT_DIR/$BUILD_DIR" "WRS: spec file" "$PRE_WRS_TAG"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): commit_git failed while extracting '$SPEC_PATH'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-
-   local SPEC_GIT
-   local PKG_NAME
-   local PKG_VER
-   local TAR_DIR
-   local TAR
-   local SOURCE_NO
-   local SOURCE_NAME
-   local PATCH_NO
-   local PATCH_NAME
-   local NUM_TAR
-   local TAR_LIST
-
-
-   for SPEC in $(cd $SPEC_DIR; ls -1 *.spec); do
-      echo $SPEC;
-      SPEC_GIT="$GIT_DIR/$SPEC"
-      PKG_NAME=$(spec_find_tag Name $SPEC_DIR/$SPEC 2>> /dev/null)
-      PKG_VER=$(spec_find_tag Version $SPEC_DIR/$SPEC 2>> /dev/null)
-      TAR_DIR="$PKG_NAME-$PKG_VER"
-      echo "   $TAR_DIR"
-
-      local TAR_HAS_CHANGED
-
-      TAR_HAS_CHANGED=1
-
-      # Copy content from COPY_LIST if defined
-      if [ "x$COPY_LIST" != "x" ]; then
-         echo "COPY_LIST: $COPY_LIST"
-         cd $WRS_PKG_DIR
-         for p in $COPY_LIST; do
-            echo "COPY_LIST: $p"
-            \cp -L -f -r -v $p $META_PATCH_TARGET_DIR/SOURCES
-            if [ $? -ne 0 ]; then
-               echo "ERROR: COPY_LIST: file not found: '$p'"
-               cd $ORIG_DIR
-               return 1
-            fi
-         done
-
-         cd - >> /dev/null
-
-         # Add and Commit
-         commit_git "$META_PATCH_TARGET_DIR" "WRS: COPY_LIST content" "$WRS_POST_COPY_TAG"
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): commit_git failed while extracting '$SPEC_PATH'"
-            cd $ORIG_DIR
-            return 1
-         fi
-      fi
-
-      local PATCH_TARGET_DIR=""
-      local EXCLUDE_PATCH_NUM_CSV=""
-      local METHOD=""
-      local RAW_SCRIPT=""
-      local RPMBUILD_BUILD_DIR=""
-      local OUTPUT_FILE="$ROOT_DIR/$PKG_DIR/tarball_extract_result"
-
-      tarball_extract  "$SPEC_DIR"  "$SPEC"  "$SPEC_GIT"  "$SOURCE_DIR"  "$BRANCH"  "$ORIG_BRANCH"  "$TAR_DIR"  "$ROOT_DIR"  "$PKG_DIR"  "$BUILD_DIR"  "$TARGET_ARCH"  "$TIS_PATCH_VER"  "$OUTPUT_FILE" "$NO_META_PATCH" "$PBR_VERSION"
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): tarball_extract failed while extracting '$SPEC_PATH'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      source $OUTPUT_FILE
-
-      # Apply patches named in spec file.
-      echo "srpm_apply_spec_patches '$SPEC_DIR/$SPEC' '$SOURCE_DIR' '$PATCH_TARGET_DIR' '$EXCLUDE_PATCH_NUM_CSV' $USE_GIT '' '$METHOD' '$RAW_SCRIPT' '$ROOT_DIR' '$RPMBUILD_BUILD_DIR'  '$SPEC_GIT' '$BRANCH'"
-      srpm_apply_spec_patches "$SPEC_DIR/$SPEC" "$SOURCE_DIR" "$PATCH_TARGET_DIR" "$EXCLUDE_PATCH_NUM_CSV" $USE_GIT "" $METHOD "$RAW_SCRIPT" "$ROOT_DIR" "$RPMBUILD_BUILD_DIR"  "$SPEC_GIT" "$BRANCH"
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): srpm_apply_spec_patches failed while extracting '$SPEC_PATH'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-   done
-
-}
-
-
-
-srpm_extract_to_git () {
-   local ORIG_SRPM_PATH=$1
-   local WRS_PKG_DIR=$2
-   local ROOT_DIR=$3
-   local BUILD_DIR=$4
-   local BRANCH=$5
-   local NO_META_PATCH=$6
-   local TIS_PATCH_VER=${7:-0}
-   local PBR_VERSION=${8:-0}
-   local USE_GIT=1
-   local TARGET_ARCH=x86_64
-
-   if [ ! -f $ORIG_SRPM_PATH ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): srpm not found '$ORIG_SRPM_PATH'"
-      return 1
-   fi
-
-   local ORIG_DIR=$(pwd)
-   local PKG_DIR=$(rpm -q --queryformat '%{NAME}\n' --nosignature -p $ORIG_SRPM_PATH)
-
-   if [ "x$ROOT_DIR" == "x" ]; then
-      ROOT_DIR="$MY_WORKSPACE/srpm_work"
-   fi
-
-   if [ "x$BUILD_DIR" == "x" ]; then
-      BUILD_DIR="$PKG_DIR/rpmbuild"
-   fi
-
-   if [ "x$BRANCH" == "x" ]; then
-      BRANCH="work"
-   fi
-
-   local SPEC_DIR="$ROOT_DIR/$BUILD_DIR/SPECS"
-   local SOURCE_DIR="$ROOT_DIR/$BUILD_DIR/SOURCES"
-   local GIT_DIR="$ROOT_DIR/$(dirname $BUILD_DIR)/gits"
-   local PATCH_TARGET_DIR
-   local META_PATCH_TARGET_DIR="$ROOT_DIR/$BUILD_DIR"
-   local ARCH=centos
-   local ORIG_BRANCH=""
-   local PRE_WRS_PREFIX="pre_wrs_"
-   local WRS_POST_COPY_PREFIX="wrs_post_copy_list_"
-   local PRE_WRS_TAG="$PRE_WRS_PREFIX$BRANCH"
-   local WRS_POST_COPY_TAG="$WRS_POST_COPY_PREFIX$BRANCH"
-   local ORIG_PRE_WRS_TAG=""
-   local THIS_FUNC
-
-
-   if [ "x$WRS_PKG_DIR" != "x" ]; then
-      if [ ! -d $WRS_PKG_DIR ]; then
-          echo "ERROR: $FUNCNAME (${LINENO}): WRS_PKG_DIR not found '$WRS_PKG_DIR'"
-          return 1
-      fi
-   fi
-
-   if [ ! -d $ROOT_DIR ]; then
-      mkdir -p "$ROOT_DIR"
-   fi
-
-   if [ ! -d $ROOT_DIR/$BUILD_DIR ]; then
-      mkdir -p "$ROOT_DIR/$BUILD_DIR"
-   else
-      if [ "x$NO_META_PATCH" != "x1" ]; then
-          echo ""
-          echo "Warning: Refusing to overwrite pre-existing edit environment for '$PKG_DIR'."
-          echo "         To delete the old edit environment use:   --edit --clean <PKG>"
-          return 2
-      fi
-   fi
-
-   prep_git_for_metadata "$ROOT_DIR/$BUILD_DIR" "$BRANCH" $NO_META_PATCH "$PRE_WRS_PREFIX"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): prep_git_for_metadata failed while extracting '$ORIG_SRPM_PATH'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   # Extract src.rpm
-   echo "rpm -i --nosignature --root=$ROOT_DIR --define='%_topdir $BUILD_DIR' $ORIG_SRPM_PATH"
-   rpm -i --nosignature --root=$ROOT_DIR --define="%_topdir $BUILD_DIR" $ORIG_SRPM_PATH
-   if [ $? -ne 0 ]; then
-       echo "ERROR: $FUNCNAME (${LINENO}): Failed to extract '$ORIG_SRPM_PATH' to '$ROOT_DIR/$BUILD_DIR'"
-       return 1
-   fi
-
-   # Add and Commit
-   commit_git "$ROOT_DIR/$BUILD_DIR" "ORIGINAL: srpm extract" "$PRE_WRS_TAG"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): commit_git failed while extracting '$ORIG_SRPM_PATH'"
-      cd $ORIG_DIR
-      return 1
-   fi
-
-   local SPEC_GIT
-   local PKG_NAME
-   local PKG_VER
-   local TAR_DIR
-   local TAR
-   local SOURCE_NO
-   local SOURCE_NAME
-   local PATCH_NO
-   local PATCH_NAME
-   local NUM_TAR
-   local TAR_LIST
-
-
-   for SPEC in $(cd $SPEC_DIR; ls -1 *.spec); do
-      echo $SPEC;
-      SPEC_GIT="$GIT_DIR/$SPEC"
-      PKG_NAME=$(srpm_spec_find_name $SPEC_DIR/$SPEC 2>> /dev/null)
-      PKG_VER=$(srpm_spec_find_version $SPEC_DIR/$SPEC 2>> /dev/null)
-      TAR_DIR="$PKG_NAME-$PKG_VER"
-      echo "   $TAR_DIR"
-
-      local TAR_HAS_CHANGED
-
-      TAR_HAS_CHANGED=1
-
-      local PATCH_TARGET_DIR=""
-      local EXCLUDE_PATCH_NUM_CSV=""
-      local METHOD=""
-      local RAW_SCRIPT=""
-      local RPMBUILD_BUILD_DIR=""
-      local OUTPUT_FILE="$ROOT_DIR/$PKG_DIR/tarball_extract_result"
-
-      tarball_extract  "$SPEC_DIR"  "$SPEC"  "$SPEC_GIT"  "$SOURCE_DIR"  "$BRANCH"  "$ORIG_BRANCH"  "$TAR_DIR"  "$ROOT_DIR"  "$PKG_DIR"  "$BUILD_DIR"  "$TARGET_ARCH"  "$TIS_PATCH_VER" "$OUTPUT_FILE" "$NO_META_PATCH" "$PBR_VERSION"
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): tarball_extract failed while extracting '$ORIG_SRPM_PATH'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      source $OUTPUT_FILE
-
-      # Apply patches named in original spec file... before our meta patches
-      echo "srpm_apply_spec_patches '$SPEC_DIR/$SPEC' '$SOURCE_DIR' '$PATCH_TARGET_DIR' '$EXCLUDE_PATCH_NUM_CSV' $USE_GIT 'ORIGINAL: ' '$METHOD' '$RAW_SCRIPT' '$ROOT_DIR' '$RPMBUILD_BUILD_DIR'  '$SPEC_GIT' '$BRANCH'"
-      srpm_apply_spec_patches "$SPEC_DIR/$SPEC" "$SOURCE_DIR" "$PATCH_TARGET_DIR" "$EXCLUDE_PATCH_NUM_CSV" $USE_GIT "ORIGINAL: " $METHOD "$RAW_SCRIPT" "$ROOT_DIR" "$RPMBUILD_BUILD_DIR"  "$SPEC_GIT" "$BRANCH"
-      if [ $? -ne 0 ]; then
-         cd $ORIG_DIR
-         echo "ERROR: $FUNCNAME (${LINENO}): srpm_apply_spec_patches failed while extracting '$ORIG_SRPM_PATH'"
-         return 1
-      fi
-
-      if [ "$GLOBAL_PATCH_TARGET_DIR" != "$PATCH_TARGET_DIR" ]; then
-         echo "changing PATCH_TARGET_DIR from $PATCH_TARGET_DIR to $GLOBAL_PATCH_TARGET_DIR"
-         PATCH_TARGET_DIR="$GLOBAL_PATCH_TARGET_DIR"
-      fi
-      cd $PATCH_TARGET_DIR
-
-      # Verify we are on the correct branch
-      CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
-      if [ "$CURRENT_BRANCH" != "$BRANCH" ]; then
-         echo "git checkout -b $BRANCH"
-         git checkout -b $BRANCH
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): failed to create branch '$BRANCH'"
-            cd $ORIG_DIR
-            return 1
-         fi
-      fi
-
-      # Tag the pre-wrs-patches contents
-      git tag $PRE_WRS_TAG
-      if [ $? -ne 0 ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): 'git tag' failed for 'rpmbuild'"
-         cd $ORIG_DIR
-         return 1
-      fi
-
-      # Copy content from COPY_LIST if defined
-      if [ "x$COPY_LIST" != "x" ]; then
-         echo "COPY_LIST: $COPY_LIST"
-         cd $WRS_PKG_DIR
-         for p in $COPY_LIST; do
-            echo "COPY_LIST: $p"
-            \cp -L -f -r -v $p $META_PATCH_TARGET_DIR/SOURCES
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): COPY_LIST: file not found: '$p'"
-               cd $ORIG_DIR
-               return 1
-            fi
-         done
-
-         cd - >> /dev/null
-
-         # Add and Commit
-         commit_git "$META_PATCH_TARGET_DIR" "WRS: COPY_LIST content" "$WRS_POST_COPY_TAG"
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): commit_git failed while extracting '$ORIG_SRPM_PATH'"
-            cd $ORIG_DIR
-            return 1
-         fi
-      fi
-
-      # Apply WRS patches
-      if [ "x$NO_META_PATCH" != "x1" ]; then
-         if [ "x$WRS_PKG_DIR" != "x" ]; then
-            # Apply wrs patches to spec file and other meta-data
-            echo "srpm_apply_meta_patches '$META_PATCH_TARGET_DIR' '$WRS_PKG_DIR' $USE_GIT '$ARCH' '$BRANCH'"
-            srpm_apply_meta_patches "$META_PATCH_TARGET_DIR" "$WRS_PKG_DIR" $USE_GIT "$ARCH" "$BRANCH"
-            if [ $? -ne 0 ]; then 
-               cd $ORIG_DIR
-               return 1
-            fi
-
-            RAW_SCRIPT=$ROOT_DIR/$PKG_DIR/raw_script2
-
-            local RPMBUILD_BUILD_DIR2=$ROOT_DIR/$BUILD_DIR/BUILD
-            if [ -d "$RPMBUILD_BUILD_DIR2" ]; then
-                echo "rm -rf RPMBUILD_BUILD_DIR2=$RPMBUILD_BUILD_DIR"
-                \rm -rf "$RPMBUILD_BUILD_DIR2"
-            fi
-            mkdir -p $RPMBUILD_BUILD_DIR2
-
-            echo "srpm_create_raw_extract_script '$SPEC_DIR/$SPEC' '$ROOT_DIR/$PKG_DIR'  '$ROOT_DIR/$BUILD_DIR'  '$TARGET_ARCH' '$TIS_PATCH_VER' '$RAW_SCRIPT'  '$TAR_DIR' '$PBR_VERSION'"
-            srpm_create_raw_extract_script "$SPEC_DIR/$SPEC" "$ROOT_DIR/$PKG_DIR"  "$ROOT_DIR/$BUILD_DIR"  "$TARGET_ARCH" "$TIS_PATCH_VER" "$RAW_SCRIPT"  "$TAR_DIR" "$PBR_VERSION"
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): srpm_create_raw_extract_script post meta-patches failed"
-               cd $ORIG_DIR
-               return 1
-            fi
-
-            if [ -d "$RPMBUILD_BUILD_DIR2" ]; then
-                echo "rm -rf RPMBUILD_BUILD_DIR2=$RPMBUILD_BUILD_DIR"
-                \rm -rf "$RPMBUILD_BUILD_DIR2"
-            fi
-            mkdir -p $RPMBUILD_BUILD_DIR2
-
-            EXTRACT_SCRIPT=$ROOT_DIR/$PKG_DIR/extract_script2
-            echo "raw_create_tarballs_extract_script_post_metapatch '$RAW_SCRIPT'  '$EXTRACT_SCRIPT'  '$RPMBUILD_BUILD_DIR'  '$SPEC_GIT'"
-            raw_create_tarballs_extract_script_post_metapatch "$RAW_SCRIPT"  "$EXTRACT_SCRIPT"  "$RPMBUILD_BUILD_DIR"  "$SPEC_GIT"
-            if [ $? -ne 0 ]; then
-                echo "ERROR: $FUNCNAME (${LINENO}): raw_create_tarballs_extract_script_post_metapatch failed"
-                cd $ORIG_DIR
-                return 1
-            fi
-
-            (
-             source $EXTRACT_SCRIPT
-             RC=$?
-             echo "SRPM_EXTRACT_DIR=$(pwd)"
-             exit $RC
-            ) | tee $EXTRACT_SCRIPT.post.log
-            if [ ${PIPESTATUS[0]} -ne 0 ] ; then
-               echo "ERROR: $FUNCNAME (${LINENO}): Failed in script '$EXTRACT_SCRIPT'"
-               cd $ORIG_DIR
-               return 1
-            fi
-
-
-            # Apply wrs patches named in modified spec file. 
-            echo "srpm_apply_spec_patches '$SPEC_DIR/$SPEC' '$SOURCE_DIR' '$PATCH_TARGET_DIR' '$EXCLUDE_PATCH_NUM_CSV' $USE_GIT 'WRS: ' '$METHOD' '$RAW_SCRIPT' '$ROOT_DIR' '$RPMBUILD_BUILD_DIR'  '$SPEC_GIT' '$BRANCH'"
-            srpm_apply_spec_patches "$SPEC_DIR/$SPEC" "$SOURCE_DIR" "$PATCH_TARGET_DIR" "$EXCLUDE_PATCH_NUM_CSV" $USE_GIT "WRS: " $METHOD "$RAW_SCRIPT" "$ROOT_DIR" "$RPMBUILD_BUILD_DIR"  "$SPEC_GIT" "$BRANCH"
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): srpm_apply_spec_patches failed"
-               cd $ORIG_DIR
-               return 1
-            fi
-         fi
-      fi
-
-   done
-
-   echo "Successfully extracted to: $BUILD_DIR"
-   cd $ORIG_DIR
-   return 0
-}
-
-
-
-
-srpm_apply_spec_patches () {
-   local SPEC_PATH=${1}
-   local PATCH_DIR=${2}
-   local PATCH_TARGET_DIR=${3}
-   local EXCLUDE_PATCH_NUM_CSV=${4}
-   local USE_GIT=${5}
-   local COMMEN_PREFIX=${6}
-   local METHOD=${7}
-   local RAW_SCRIPT=${8}
-   local ROOT_DIR=${9}
-   local RPMBUILD_BUILD_DIR=${10}  
-   local SPEC_GIT=${11}
-   local BRANCH=${12}
-
-
-   local PATCH_NO 
-   local PATCH_NAME 
-   local PATCH 
-   local PATCH_ARGS
-
-   local ORIG_DIR=$(pwd)
-   echo "Applying patches"
-
-   if [ ! -f "$SPEC_PATH" ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Can't find spec file at '$SPEC_PATH'"
-      return 1
-   fi
-
-   if [ ! -d "$PATCH_DIR" ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Patch directory not found '$PATCH_DIR'"
-      return 1
-   fi
-
-   cd $PATCH_TARGET_DIR
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Failed to cd to Target directory '$PATCH_TARGET_DIR'"
-      return 1
-   fi
-
-   # Add patches
-   local PL=""
-   if [ "$METHOD" -eq $METHOD_RPMBUILD_SCRIPT ]; then
-      PL=$(raw_patch_order $RAW_SCRIPT $SPEC_PATH)
-      if [ $? -ne 0 ];then
-         echo "ERROR: $FUNCNAME (${LINENO}): raw_patch_order failed on RAW_SCRIPT=$RAW_SCRIPT"
-         return 1
-      fi
-   else
-      grep '^%patch[0-9]* ' $SPEC_PATH > /dev/null
-      if [ $? -eq 0 ];then
-         echo "Using '%patch' method"
-         PL=$(grep '^%patch[0-9]* ' $SPEC_PATH | awk  '{print $1}' | sed 's/^%patch//')
-      else
-         grep '^git am' $SPEC_PATH > /dev/null
-         if [ $? -eq 0 ];then
-            echo "Using 'git am' method"
-            PL=$(grep -i '^[Pp]atch[0-9]*:' $SPEC_PATH | awk -F : '{print $1}' | sed 's/^[Pp]atch//' | sort  --unique --version-sort)
-         else
-            grep '^xargs git am' $SPEC_PATH > /dev/null
-            if [ $? -eq 0 ];then
-               echo "Using 'xargs git am' method"
-               PL=$(grep -i '^[Pp]atch[0-9]*:' $SPEC_PATH | awk -F : '{print $1}' | sed 's/^[Pp]atch//' | sort  --unique --version-sort)
-            else
-               echo "Warning: no known patch apply command"
-            fi
-         fi
-      fi
-   fi
-
-   local PATCH_COUNT
-   if [ "x$PL" != "x" ];then
-      PATCH_COUNT=0
-      for PATCH_NO in $PL ; do
-         PATCH_COUNT=$((PATCH_COUNT + 1))
-         local EXCLUDED=0
-         for EXCLUDE_PATCH_NO in $(echo $EXCLUDE_PATCH_NUM_CSV | tr ',' ' '); do
-             if [ $EXCLUDE_PATCH_NO == $PATCH_NO ]; then
-                 EXCLUDED=1
-                 break
-             fi
-         done
-
-         if [ $EXCLUDED -eq 1 ]; then
-            echo "   Exclude Patch$PATCH_NO"
-            continue
-         fi
-
-         local PATCH_NM
-         PATCH_NM="Patch$PATCH_NO"
-         echo "   $PATCH_NM"
-
-         if [ "$METHOD" -eq $METHOD_RPMBUILD_SCRIPT ]; then
-            PATCH_NAME=$(raw_extract_patch_file $RAW_SCRIPT $PATCH_NO $SPEC_PATH)
-         else
-            PATCH_NAME=$(spec_find_tag $PATCH_NM $SPEC_PATH 2>> /dev/null | awk -F / '{print $NF}')
-            if [ "x$PATCH_NAME" == "x" ]; then
-               PATCH_NM="patch$PATCH_NO"
-               echo "   $PATCH_NM"
-               PATCH_NAME=$(spec_find_tag $PATCH_NM $SPEC_PATH 2>> /dev/null | awk -F / '{print $NF}')
-            fi
-         fi
-
-         echo "      $PATCH_NAME"
-         PATCH="$PATCH_DIR/$PATCH_NAME"
-
-         if [ "$METHOD" -eq $METHOD_RPMBUILD_SCRIPT ]; then
-            PATCH_ARGS="-p1"
-         else
-            PATCH_ARGS=$(spec_find_patch_args "$PATCH_NM" "$SPEC_PATH")
-         fi
-
-         echo "srpm_apply_patch '$PATCH' '$PATCH_ARGS' '$PATCH_TARGET_DIR' '$USE_GIT' '$COMMEN_PREFIX$PATCH_NM: ' '$METHOD' '$RAW_SCRIPT' '$ROOT_DIR' '$RPMBUILD_BUILD_DIR' '$SPEC_GIT' '$PATCH_NO' '$BRANCH' '$SPEC_PATH' '$PATCH_COUNT'"
-         srpm_apply_patch "$PATCH" "$PATCH_ARGS" "$PATCH_TARGET_DIR" $USE_GIT "$COMMEN_PREFIX$PATCH_NM: " $METHOD "$RAW_SCRIPT" "$ROOT_DIR" "$RPMBUILD_BUILD_DIR" "$SPEC_GIT" "$PATCH_NO" "$BRANCH" "$SPEC_PATH" $PATCH_COUNT
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): failed to apply patch '$PATCH'"
-            cd $ORIG_DIR
-            return 1
-         fi
-      done
-   fi
-
-   cd $ORIG_DIR
-   return 0
-}
-
-
-
-srpm_apply_patch() {
-   local PATCH="${1}"
-   local PATCH_ARGS="${2}"
-   local TARGET_DIR="${3}"
-   local USE_GIT="${4}"
-   local COMMENT_PREFIX="${5}"
-   local METHOD=${6}
-   local RAW_SCRIPT=${7}
-   local ROOT_DIR=${8}
-   local RPMBUILD_BUILD_DIR=${9}
-   local SPEC_GIT=${10}
-   local PATCH_NO="${11}"
-   local BRANCH="${12}"
-   local SPEC_PATH="${13}"
-   local PATCH_COUNT_TARGET="${14}"
-
-
-   # echo "srpm_apply_patch: PATCH=$PATCH  PATCH_ARGS=$PATCH_ARGS  TARGET_DIR=$TARGET_DIR  USE_GIT=$USE_GIT  COMMENT_PREFIX=$COMMENT_PREFIX  METHOD=$METHOD  RAW_SCRIPT=$RAW_SCRIPT  ROOT_DIR=$ROOT_DIR  RPMBUILD_BUILD_DIR=$RPMBUILD_BUILD_DIR  SPEC_GIT=$SPEC_GIT  PATCH_NO=$PATCH_NO"
-   local ORIG_DIR
-   ORIG_DIR=$(pwd)
-
-   if [ ! -f $PATCH ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Patch '$PATCH' not found"
-      return 1
-   fi
-
-   if [ "x$TARGET_DIR" == "x" ]; then
-      TARGET_DIR="$ORIG_DIR"
-   fi
-
-   if [ ! -d $TARGET_DIR ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Directory '$TARGET_DIR' not found"
-      return 1
-   fi
-
-   if [ $USE_GIT -gt 0 ]; then
-      if [ ! -d "$TARGET_DIR/.git" ] && [ ! -d "$TARGET_DIR/../.git" ]; then
-         echo "ERROR: $FUNCNAME (${LINENO}): Directory '$TARGET_DIR' is not managed by git"
-         return 1
-      fi
-   fi
-
-   cd "$TARGET_DIR"
-   if [ $? -ne 0 ]; then
-      echo "ERROR: $FUNCNAME (${LINENO}): Failed to cd to '$TARGET_DIR'"
-      return 1
-   fi
-
-   local TAG="v$BRANCH"
-   local PFN=$(basename $PATCH)
-
-   local MSG="$PFN"
-   local HASH=""
-   local ADD_OUT
-   local ADD_WC
-
-   if [ $USE_GIT -gt 0 ]; then
-      HASH=$(git log --pretty=format:'%H' --grep="$MSG\$")
-   fi
-
-   if [ "x$HASH" == "x" ]; then
-      if [ $USE_GIT -gt 0 ]; then
-         # Verify we are on the correct branch
-         CURRENT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
-         if [ "$CURRENT_BRANCH" != "$BRANCH" ]; then
-            echo "git checkout $TAG"
-            git checkout $TAG
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): failed to checkout tag '$TAG'"
-            fi
-
-            echo "git checkout -b $BRANCH"
-            git checkout -b $BRANCH
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): failed to create branch '$BRANCH'"
-               cd $ORIG_DIR
-               return 1
-            fi
-         fi
-      fi
-
-      if [ $METHOD -eq $METHOD_RPMBUILD_SCRIPT ]; then
-         local PATCH_SCRIPT=$(dirname $RAW_SCRIPT)/patch_script
-         echo "raw_create_patch_apply_script   $RAW_SCRIPT  $PATCH_NO $PATCH_SCRIPT  $RPMBUILD_BUILD_DIR  $SPEC_GIT  $SPEC_PATH  $PATCH_COUNT_TARGET"
-         raw_create_patch_apply_script   $RAW_SCRIPT  $PATCH_NO $PATCH_SCRIPT  $RPMBUILD_BUILD_DIR  $SPEC_GIT  $SPEC_PATH $PATCH_COUNT_TARGET
-         if [ $? -ne 0 ]; then
-            echo "ERROR: $FUNCNAME (${LINENO}): raw_create_patch_apply_script failed"
-            cd $ORIG_DIR
-            return 1
-         fi
-
-         if [ -f $PATCH_SCRIPT ]; then
-            echo "source $PATCH_SCRIPT"
-            (
-             source $PATCH_SCRIPT
-            )
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): Failed to apply patch '$PATCH' using script '$PATCH_SCRIPT'"
-               return 1
-            fi
-         else
-            echo "ERROR: $FUNCNAME (${LINENO}): file not found at PATCH_SCRIPT=$PATCH_SCRIPT"
-            cd $ORIG_DIR
-            return 1
-         fi
-      else
-         echo "patch $PATCH_ARGS < $PATCH"
-         patch -f $PATCH_ARGS --no-backup-if-mismatch < $PATCH
-         if [ $? -ne 0 ]; then
-            echo "failed to apply patch '$PATCH'"
-            return 1
-         fi
-      fi
-
-      if [ $PWD = $HOME ]; then
-          echo "DPENNEY: in the home dir somehow"
-          return 1
-      fi
-
-      if [ $? -eq 0 ]; then
-         if [ $USE_GIT -gt 0 ]; then
-            ADD_OUT=$(git add --all --verbose)
-            if [ $? -ne 0 ]; then
-               echo "ERROR: $FUNCNAME (${LINENO}): 'git add' failed for patch '$PATCH' of '$SPEC' while extracting '$ORIG_SRPM_PATH'"
-               cd $ORIG_DIR
-               return 1
-            fi
-
-            ADD_WC=$(git status porcelain | wc -l)
-            if [ $ADD_WC -gt 0 ]; then
-               # The kernel-rt has an empty "test patch", so use --allow-empty
-               git commit --allow-empty -m "$COMMENT_PREFIX$PFN"
-               if [ $? -ne 0 ]; then
-                  echo "ERROR: $FUNCNAME (${LINENO}): 'git commit' failed for patch '$PATCH' of '$SPEC' while extracting '$ORIG_SRPM_PATH'"
-                  cd $ORIG_DIR
-                  return 1
-               fi
-            fi
-         fi
-      else
-         echo "ERROR: $FUNCNAME (${LINENO}): Failed patch: $MSG"
-         cd $ORIG_DIR
-         return 1
-      fi
-   else
-      echo "Patch already applied: $MSG"
-      if [ $USE_GIT -gt 0 ]; then
-         git tag -d $TAG
-         git tag $TAG $HASH
-         echo "git tag $TAG $HASH == $?"
-      fi
-   fi
-
-   cd $ORIG_DIR
-   return 0
-}
-
-
-srpm_find_tag () {
-   local TAG=$1
-   local SRPM_FILE=$2
-
-   local VALUE=$(rpm -q --queryformat "%{$TAG}\n" --nosignature -p $SRPM_FILE)
-   if [ $? -ne 0 ]; then
-      echo ""
-      return 1
-   fi
-
-   echo "$VALUE"
-   return 0
-}
-
-
-srpm_list_packages () {
-   local SRPM_FILE=$1
-
-   local TMPDIR=$(mktemp -d /tmp/srpm_list_packages_XXXXXX)
-
-   (
-    cd $TMPDIR &>> /dev/null
-    # rpm2cpio $SRPM_FILE | cpio -civ '*.spec' &>> /dev/null
-    rpm -i --root=$TMPDIR --nosignature $SRPM_FILE
-   )
-
-   for SPEC in $(find $TMPDIR -name '*.spec' | sort -V); do
-      spec_list_packages $SPEC
-   done
-    
-   \rm -rf $TMPDIR &>> /dev/null
-}
-
-
-srpm_list_versioned_packages () {
-   local SRPM_FILE=$1
-
-   local TMPDIR=$(mktemp -d /tmp/srpm_list_packages_XXXXXX)
-
-   (
-    cd $TMPDIR &>> /dev/null
-    # rpm2cpio $SRPM_FILE | cpio -civ '*.spec' &>> /dev/null
-    rpm -i --root=$TMPDIR --nosignature $SRPM_FILE
-   )
-
-   for SPEC in $(find $TMPDIR -name '*.spec' | sort -V); do
-      spec_list_versioned_packages $SPEC
-   done
-
-   \rm -rf $TMPDIR &>> /dev/null
-}
-
-
-srpm_list_ver_rel_packages () {
-   local SRPM_FILE=$1 
-   
-   local TMPDIR=$(mktemp -d /tmp/srpm_list_packages_XXXXXX)
-   
-   (
-    cd $TMPDIR &>> /dev/null
-    # rpm2cpio $SRPM_FILE | cpio -civ '*.spec' &>> /dev/null
-    rpm -i --root=$TMPDIR --nosignature $SRPM_FILE
-   )         
-   
-   for SPEC in $(find $TMPDIR -name '*.spec' | sort -V); do
-      spec_list_ver_rel_packages $SPEC
-   done  
-
-   \rm -rf $TMPDIR &>> /dev/null
-}
-
-
-srpm_list_ver_rel_arch_packages () {
-   local SRPM_FILE=$1 
-   
-   local TMPDIR=$(mktemp -d /tmp/srpm_list_packages_XXXXXX)
-   
-   (
-    cd $TMPDIR &>> /dev/null
-    # rpm2cpio $SRPM_FILE | cpio -civ '*.spec' &>> /dev/null
-    rpm -i --root=$TMPDIR --nosignature $SRPM_FILE
-   )         
-   
-   for SPEC in $(find $TMPDIR -name '*.spec' | sort -V); do
-      spec_list_ver_rel_arch_packages $SPEC
-   done  
-
-   \rm -rf $TMPDIR &>> /dev/null
-}
-
-
-srpm_build_requires () {
-   local SRPM_FILE=$1
-
-   local TMPDIR=$(mktemp -d /tmp/srpm_list_packages_XXXXXX)
-
-   (
-    cd $TMPDIR &>> /dev/null
-    # rpm2cpio $SRPM_FILE | cpio -civ '*.spec' &>> /dev/null
-    rpm -i --root=$TMPDIR $SRPM_FILE
-   )
-
-   for SPEC in $(find $TMPDIR -name '*.spec' | sort -V); do
-      spec_build_requires $SPEC
-   done
-
-   \rm -rf $TMPDIR &>> /dev/null
-}
-
-
-srpm_match_package_list () {
-   local Aname=$1[@]
-   local TARGET_LIST=("${!Aname}")
-   local SRPM_FILE=$2
-   local TARGET
-   local PKG_NAME
-
-   for PKG_NAME in $(srpm_list_packages "$SRPM_FILE"); do
-       for TARGET in "${TARGET_LIST[@]}"; do
-           if [ "$PKG_NAME" == "$TARGET" ]; then
-               >&2 echo "found target '$TARGET' in file '$SRPM_FILE' as a package name"
-               echo "$TARGET"
-               return 0
-           fi
-       done
-   done
-
-   return 1
-}
-
-srpm_match_package () {
-   local TARGET=$1
-   local SRPM_FILE=$2
-   local PKG_NAME
-
-   for PKG_NAME in $(srpm_list_packages "$SRPM_FILE"); do
-       if [ "$PKG_NAME" == "$TARGET" ]; then
-           echo "found target '$TARGET' in file '$SRPM_FILE' as a package name"
-           return 0
-       fi
-   done
-
-   return 1
-}
-
-
-srpm_match_target_list () {
-   local Aname=$1[@]
-   local TARGET_LIST=("${!Aname}")
-   local SRPM_FILE=$2
-   local TARGET
-   local NAME
-   local SERVICE
-   local PKG_NAME
-
-   NAME=$(srpm_find_tag Name "$SRPM_FILE")
-   if [ $? -eq 0 ]; then
-       for TARGET in "${TARGET_LIST[@]}"; do
-           if [ "$NAME" == "$TARGET" ]; then
-               echo $TARGET
-               return 0
-           fi
-           if [ "$BUILD_TYPE" == "rt" ]; then
-               if [ "${NAME}-rt" == "$TARGET" ]; then
-                   echo $TARGET
-                   return 0
-               fi
-           fi
-       done
-   fi
-
-   SERVICE=$(srpm_find_tag Service "$SRPM_FILE")
-   if [ $? -eq 0 ]; then
-       for TARGET in "${TARGET_LIST[@]}"; do
-           if [ "$SERVICE" == "$TARGET" ]; then
-               echo $TARGET
-               return 0
-           fi
-       done
-   fi
-
-   srpm_match_package_list TARGET_LIST "$SRPM_FILE"
-   if [ $? -eq 0 ]; then
-       return 0
-   fi
-
-   return 1
-}
-
-srpm_match_target () {
-   local TARGET=$1
-   local SRPM_FILE=$2
-   local NAME
-   local SERVICE
-   local PKG_NAME
-
-   NAME=$(srpm_find_tag Name "$SRPM_FILE")
-   if [ $? -eq 0 ]; then
-       if [ "$NAME" == "$TARGET" ]; then
-           echo "found target '$TARGET' in file '$SRPM_FILE' as a name"
-           return 0
-       fi
-   fi
-
-   SERVICE=$(srpm_find_tag Service "$SRPM_FILE")
-   if [ $? -eq 0 ]; then
-       if [ "$SERVICE" == "$TARGET" ]; then
-           echo "found target '$TARGET' in file '$SRPM_FILE' as a service"
-           return 0
-       fi
-   fi
-
-   srpm_match_package "$TARGET" "$SRPM_FILE"
-   if [ $? -eq 0 ]; then
-       return 0
-   fi
-
-   return 1
-}
-
-# The intent of this function is to calculate the number of commits between the
-# base srcrev and the top-most commit.  This is only meant to be used at the
-# top level of a subgit; not a subdirectory within a git tree.
-#
-srpm_git_revision_count () {
-    local SRC_DIR=$1
-    local BASE_SRCREV=$2
-    local -i COUNT=0
-    local -i DIRTY=0
-
-    pushd $SRC_DIR > /dev/null
-    COUNT=$(git rev-list --count $BASE_SRCREV..HEAD)
-    if [ $? -ne 0 ]; then
-        popd > /dev/null
-        return 1
-    fi
-    DIRTY=$(git status --porcelain | wc -l)
-    if [ "$DIRTY" -ne 0 ]; then
-        # add an extra value for uncommitted work.
-        COUNT=$((COUNT+1))
-    fi
-    popd > /dev/null
-
-    echo $COUNT
-    return 0
-}
-
-# Calculate a folder-specific GITREVCOUNT
-srpm_git_revision_count_pkg () {
-    local SRC_DIR=$1
-    local BASE_SRCREV=$2
-    local -i COUNT=0
-    local -i DIRTY=0
-
-    pushd $SRC_DIR > /dev/null
-    if [ -z "${BASE_SRCREV}" ]; then
-        COUNT=$(git rev-list --count HEAD -- .)
-    else
-        COUNT=$(git rev-list --count $BASE_SRCREV..HEAD -- .)
-    fi
-    if [ $? -ne 0 ]; then
-        popd > /dev/null
-        return 1
-    fi
-    DIRTY=$(git status --porcelain . | wc -l)
-    if [ "$DIRTY" -ne 0 ]; then
-        # add an extra value for uncommitted work.
-        COUNT=$((COUNT+1))
-    fi
-    popd > /dev/null
-
-    echo $COUNT
-    return 0
-}
-
-srpm_pbr_version () {
-    local SRC_DIR=$1
-    local VER
-
-    pushd $SRC_DIR > /dev/null
-    VER=$(python setup.py -q rpm_version)
-    if [ $? -ne 0 ]; then
-        return 1
-    fi
-    popd > /dev/null
-
-    echo $VER
-    return 0
-}
-
-srpm_git_revision_count_list () {
-    local SRC_DIR="${1}" ; shift
-    local BASE_SRCREV="${1}" ; shift
-    local -i COUNT=0
-    local -i DIRTY=0
-
-    if [ $# -eq 0 ]; then
-        echo 0
-        return 0
-    fi
-
-    pushd $SRC_DIR > /dev/null
-    if [ -z "${BASE_SRCREV}" ]; then
-        COUNT=$(git rev-list --count HEAD -- $@)
-    else
-        COUNT=$(git rev-list --count $BASE_SRCREV..HEAD -- $@)
-    fi
-    if [ $? -ne 0 ]; then
-        popd > /dev/null
-        return 1
-    fi
-    DIRTY=$(git status --porcelain $@ | wc -l)
-    if [ "$DIRTY" -ne 0 ]; then
-        # add an extra value for uncommitted work.
-        COUNT=$((COUNT+1))
-    fi
-    popd > /dev/null
-
-    echo $COUNT
-    return 0
-}
-
-srpm_canonical_path_single () {
-    local path="$1"
-    local canonical_path
-
-    if [[ "${path}" =~ /stx/downloads/|/centos-repo/|/cgcs-centos-repo/ ]]; then
-        # Expand all but final symlink.
-        # These symlinks often point outside of the source code repository.
-        canonical_path="$(readlink -f "$(dirname "${path}")")"
-        if [ $? -ne 0 ]; then
-            >&2  echo "ERROR: $FUNCNAME (${LINENO}): readlink -f '${path}'"
-            return 1
-        fi
-        canonical_path+="/$(basename "${path}")"
-    else
-        # expand all symlinks
-        canonical_path="$(readlink -f "${path}")"
-        if [ $? -ne 0 ]; then
-            >&2  echo "ERROR: $FUNCNAME (${LINENO}): readlink -f '${path}'"
-            return 1
-        fi
-    fi
-
-    echo "${canonical_path}"
-}
-
-srpm_canonical_path () {
-    local path
-    if [ $# -eq 0 ] ; then
-        while IFS= read -r path; do
-            srpm_canonical_path_single "${path}" || return 1
-        done
-    else
-        while [ $# -ne 0 ] ; do
-            srpm_canonical_path_single "${1}" || return 1
-            shift
-        done
-    fi
-}
-
-#
-# Write to a file the list of input for a package.
-# Assumes PKG_BASE is defined, and build_srpm.data file has been sourced.
-#
-srpm_source_list () {
-    local SRC_BUILD_TYPE="$1"
-    local SRPM_OR_SPEC_PATH="$2"
-    local INPUT_FILES_SORTED="$3"
-
-    local INPUT_FILES
-
-    if [ -z "${INPUT_FILES_SORTED}" ]; then
-        >&2  echo "ERROR: $FUNCNAME (${LINENO}): missing arguement"
-        return 1
-    fi
-
-    INPUT_FILES="$(mktemp --tmpdir input_files_XXXXXX)"
-
-    # Create lists of input files (INPUT_FILES) and symlinks (INPUT_LINKS).
-    # First elements are absolute paths...
-    srpm_canonical_path "${PKG_BASE}" > "${INPUT_FILES}"
-    if [ $? -ne 0 ]; then
-        >&2  echo "ERROR: $FUNCNAME (${LINENO}): srpm_canonical_path PKG_BASE='${PKG_BASE}'"
-        \rm "${INPUT_FILES}"
-        return 1
-    fi
-
-    if [ "${SRC_BUILD_TYPE}" == "${SRC_BUILD_TYPE_SRPM}" ]; then
-        srpm_canonical_path "${SRPM_OR_SPEC_PATH}" >> "${INPUT_FILES}"
-        if [ $? -ne 0 ]; then
-            >&2  echo "ERROR: $FUNCNAME (${LINENO}): srpm_canonical_path SRPM_OR_SPEC_PATH='$SRPM_OR_SPEC_PATH'"
-            \rm "${INPUT_FILES}"
-            return 1
-        fi
-    fi
-
-    # ...additional elements are based on values already sourced from
-    # build_srpm.data (COPY_LIST, SRC_DIR, COPY_LIST_TO_TAR, OPT_DEP_LIST)
-    # and may be relative to $PKG_BASE
-    #
-    # Use a subshell so any directory changes have no lasting effect.
-
-    (
-        cd "${PKG_BASE}"
-        if [ "x${COPY_LIST}" != "x" ]; then
-            srpm_canonical_path ${COPY_LIST} >> "${INPUT_FILES}"
-            if [ $? -ne 0 ]; then
-                >&2  echo "ERROR: $FUNCNAME (${LINENO}): srpm_canonical_path COPY_LIST='${COPY_LIST}'"
-                return 1
-            fi
-        fi
-
-        if [ "${SRC_BUILD_TYPE}" == "${SRC_BUILD_TYPE_SPEC}" ]; then
-            if [ "x${SRC_DIR}" != "x" ]; then
-                srpm_canonical_path "${SRC_DIR}" >> "${INPUT_FILES}"
-            fi
-
-            if [ "x${COPY_LIST_TO_TAR}" != "x" ]; then
-                srpm_canonical_path ${COPY_LIST_TO_TAR} >> "${INPUT_FILES}"
-                if [ $? -ne 0 ]; then
-                    >&2  echo "ERROR: $FUNCNAME (${LINENO}): srpm_canonical_path COPY_LIST_TO_TAR='${COPY_LIST_TO_TAR}'"
-                    return 1
-                fi
-            fi
-        fi
-
-        if [ "x${OPT_DEP_LIST}" != "x" ]; then
-            srpm_canonical_path ${OPT_DEP_LIST} >> "${INPUT_FILES}" 2> /dev/null || true
-        fi
-
-        if [ "x$BUILD_TYPE" != "x" ]; then
-            if [ "x${OPT_DEP_LIST_FOR_BUILD_TYPE[$BUILD_TYPE]}" != "x" ]; then
-                srpm_canonical_path ${OPT_DEP_LIST_FOR_BUILD_TYPE[$BUILD_TYPE]} >> "${INPUT_FILES}" 2> /dev/null || true
-            fi
-        fi
-    )
-
-    if [ $? -ne 0 ]; then
-        \rm "${INPUT_FILES}"
-        return 1
-    fi
-
-    # Create sorted, unique list of canonical paths
-    cat "${INPUT_FILES}" | sort --unique > "${INPUT_FILES_SORTED}"
-
-    \rm "${INPUT_FILES}"
-}
-
-
-#
-# Write to a file the list of input files for a package.
-# Assumes PKG_BASE is defined, and build_srpm.data file has been sourced.
-#
-srpm_source_file_list () {
-    local SRC_BUILD_TYPE="$1"
-    local SRPM_OR_SPEC_PATH="$2"
-    local INPUT_FILES_SORTED="$3"
-
-    LINK_FILTER='\([/]stx[/]downloads[/]\|[/]centos-repo[/]\|[/]cgcs-centos-repo[/]\)'
-    local INPUT_FILES
-    local INPUT_LINKS
-    local INPUT_SOURCES
-    local path
-
-    if [ -z "${INPUT_FILES_SORTED}" ]; then
-        >&2  echo "ERROR: $FUNCNAME (${LINENO}): missing arguement"
-        return 1
-    fi
-
-
-    INPUT_SOURCES="$(mktemp --tmpdir input_sources_XXXXXX)"
-    INPUT_FILES="$(mktemp --tmpdir input_files_XXXXXX)"
-    INPUT_LINKS="$(mktemp --tmpdir input_links_XXXXXX)"
-
-    srpm_source_list "${SRC_BUILD_TYPE}" "${SRPM_OR_SPEC_PATH}" "${INPUT_SOURCES}"
-
-    # Create lists of input files (INPUT_FILES) and symlinks (INPUT_LINKS).
-    # First elements are absolute paths...
-    while read path; do
-        find "${path}" ! -path '*/.git/*' ! -path '*/.tox/*' -type f >> $INPUT_FILES
-        find "${path}" ! -path '*/.git/*' ! -path '*/.tox/*' -type l >> $INPUT_LINKS
-    done < "${INPUT_SOURCES}"
-
-    # Create sorted, unique list of canonical paths
-    (
-        while IFS= read -r path; do
-            srpm_canonical_path "${path}"
-        done < "${INPUT_FILES}"
-
-        while IFS= read -r path; do
-            link_path="$(srpm_canonical_path "${path}")"
-            # only report the path if it points to a file
-            if [ -f ${link_path} ]; then
-                echo "${link_path}"
-            fi
-        done < "${INPUT_LINKS}"
-    ) | sort --unique > "${INPUT_FILES_SORTED}"
-
-    \rm "${INPUT_FILES}" "${INPUT_SOURCES}"
-}
-
-srpm_source_build_data () {
-    local DATA_FILE="$1"
-    local SRC_BUILD_TYPE="$2"
-    local SRPM_OR_SPEC_PATH="$3"
-
-    if [ ! -f $DATA_FILE ]; then
-        >&2 echo "ERROR: $FUNCNAME (${LINENO}): $DATA_FILE not found"
-        return 1
-    fi
-
-    unset SRC_DIR
-    unset COPY_LIST
-    unset COPY_LIST_TO_TAR
-    unset OPT_DEP_LIST
-    unset OPT_DEP_LIST_FOR_BUILD_TYPE
-
-    unset TIS_PATCH_VER
-    unset PBR_VERSION
-    unset BUILD_IS_BIG
-    unset BUILD_IS_SLOW
-
-    unset PKG_BASE_SRCREV
-    unset SRC_BASE_SRCREV
-    unset TIS_BASE_SRCREV
-    unset BASE_SRCREV_FOR_PATH
-    unset ABS_BASE_SRCREV_FOR_PATH
-
-    declare -g SRC_DIR
-    declare -g COPY_LIST
-    declare -g COPY_LIST_TO_TAR
-    declare -g OPT_DEP_LIST
-    declare -g -A OPT_DEP_LIST_FOR_BUILD_TYPE
-
-    declare -g TIS_PATCH_VER
-    declare -g PBR_VERSION
-    declare -g BUILD_IS_BIG
-    declare -g BUILD_IS_SLOW
-
-    declare -g PKG_BASE_SRCREV
-    declare -g SRC_BASE_SRCREV
-    declare -g TIS_BASE_SRCREV
-    declare -g -A BASE_SRCREV_FOR_PATH
-    declare -g -A ABS_BASE_SRCREV_FOR_PATH
-
-    BUILD_IS_BIG=0
-    BUILD_IS_SLOW=0
-
-    source $DATA_FILE
-
-    # Hope to phase out TIS_BASE_SRCREV in favor of SRC_BASE_SRCREV,
-    # but will need this for backward compatibility during the transition.
-    if [ -z ${SRC_BASE_SRCREV} ] && ! [ -z ${TIS_BASE_SRCREV} ]; then
-        SRC_BASE_SRCREV=${TIS_BASE_SRCREV}
-    fi
-
-    for path in ${!BASE_SRCREV_FOR_PATH[@]}; do
-        abs_path="$(readlink -f "${path}")"
-        ABS_BASE_SRCREV_FOR_PATH[${abs_path}]=${BASE_SRCREV_FOR_PATH[${path}]}
-    done
-
-    # TIS_PATCH_VER is mandatory
-    if [ -z "$TIS_PATCH_VER" ] && [ -z "$PBR_VERSION" ]; then
-        >&2 echo "ERROR: $FUNCNAME (${LINENO}): TIS_PATCH_VER or PBR_VERSION must be set in $DATA_FILE"
-        return 1
-    elif [[ "$PBR_VERSION" == "auto" ]]; then
-       TIS_PATCH_VER="0"
-       if [ ! -d "$SRC_DIR" ]; then
-          >&2 echo "ERROR: $FUNCNAME (${LINENO}): SRC_DIR must specify a subgit root path"
-          return 1
-       fi
-       PBR_VERSION=$(srpm_pbr_version $SRC_DIR)
-       if [ $? -ne 0 ] || [ "$PBR_VERSION" == "" ]; then
-          >&2 echo "ERROR: $FUNCNAME (${LINENO}): Invalid PBR_VERSION '$PBR_VERSION'"
-          return 1
-       fi
-    elif [[ "${TIS_PATCH_VER}" =~ [^0-9] ]]; then
-        # Expand TIS_PATCH_VER with supported variables
-        local -i PKG_GITREVCOUNT=0
-        local -i GITREVCOUNT=0
-        local varname
-
-        for varname in ${TIS_PATCH_VER//[+-]/ }; do
-            if [ "${varname}" = "PKG_GITREVCOUNT" ]; then
-                # Calculate PKG_GITREVCOUNT, with optional PKG_BASE_SRCREV
-                PKG_GITREVCOUNT=$(srpm_git_revision_count_pkg $PKG_BASE $PKG_BASE_SRCREV)
-                if [ $? -ne 0 ]; then
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): Failed to calculate PKG_GITREVCOUNT"
-                    return 1
-                fi
-            elif [ "${varname}" = "GITREVCOUNT" ] || [ "${varname}" = "SRC_GITREVCOUNT" ]; then
-                # Calculate GITREVCOUNT
-                if [ -z "$SRC_BASE_SRCREV" ]; then
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): SRC_BASE_SRCREV must be set in $DATA_FILE"
-                    return 1
-                fi
-
-                SRC_GITREVCOUNT=$(srpm_git_revision_count $SRC_DIR $SRC_BASE_SRCREV)
-                if [ $? -ne 0 ]; then
-                    >&2 echo "ERROR: $FUNCNAME (${LINENO}): Failed to calculate ${varname}"
-                    return 1
-                fi
-
-                GITREVCOUNT=${SRC_GITREVCOUNT}
-            elif [ "${varname}" = "OTHER_GITREVCOUNT" ]; then
-                OTHER_GITREVCOUNT=0
-                local git_root
-
-                local temp_list
-                local temp_list_filtered
-                local temp_list_git_filtered
-
-                temp_list="$(mktemp --tmpdir srpm_src_list_XXXXXX)"
-                temp_list_filtered="$(mktemp --tmpdir srpm_src_list_filtered_XXXXXX)"
-                temp_list_git_filtered="$(mktemp --tmpdir srpm_src_list_git_filtered_XXXXXX)"
-
-                # Collect a list of inputs files and directories
-                srpm_source_list "${SRC_BUILD_TYPE}" "${SRPM_OR_SPEC_PATH}" "${temp_list}"
-
-                # Create a filtered list of input files and directoies, excluding stuff under $PKG_BASE and $SRC_DIR
-                if [ "${SRC_DIR}" == "" ]; then
-                    grep -v "^$(readlink -f "${PKG_BASE}")" "${temp_list}" > "${temp_list_filtered}"
-                else
-                    grep -v "^$(readlink -f "${PKG_BASE}")" "${temp_list}" | grep -v "^$(readlink -f "${SRC_DIR}")" > "${temp_list_filtered}"
-                fi
-
-                for git_root in $GIT_LIST; do
-                    local abs_git_root
-                    local SRCREV=""
-                    local path
-                    local git_rev_count=0
-
-                    # Further filter the list of inputs to just those from a particular git
-                    abs_git_root="$(readlink -f "${git_root}")"
-                    cat "${temp_list_filtered}" | grep "^${abs_git_root}" > "${temp_list_git_filtered}"
-
-                    # If not inputs for this git, skip to the next git
-                    if [ $(cat "${temp_list_git_filtered}" | wc -l) -eq 0 ]; then
-                        continue
-                    fi
-
-                    # If there is exactly one input listed for the git, then there are a few special options.
-                    # If the path matches a dictionary key of BASE_SRCREV_FOR_PATH, then pull the SRCREV
-                    # from BASE_SRCREV_FOR_PATH.  Further, if that SRCREV is "OTHER_PKG_BASE_SRCREV", then
-                    # assume that path is a PKG_BASE for another package, and try to extract the
-                    # PKG_BASE_SRCREV=xxx value from the build_srpm.data of that package.
-                    if [ $(cat "${temp_list_git_filtered}" | wc -l) -eq 1 ]; then
-                        path=$(head -n 1 "${temp_list_git_filtered}")
-                        SRCREV=${ABS_BASE_SRCREV_FOR_PATH[${path}]}
-                        if [ "${SRCREV}" == "OTHER_PKG_BASE_SRCREV" ] && [ -f ${path}/${DISTRO}/build_srpm.data ] ; then
-                            SRCREV=$(grep PKG_BASE_SRCREV= ${path}/${DISTRO}/build_srpm.data | sed 's#PKG_BASE_SRCREV=##')
-                            if [ -z ${SRCREV} ]; then
-                                >&2 echo "ERROR: $FUNCNAME (${LINENO}): Tried to evaluate 'OTHER_PKG_BASE_SRCREV', but failed to extract 'PKG_BASE_SRCREV' from '${path}/${DISTRO}/build_srpm.data'"
-                                return 1
-                            fi
-                        fi
-                    fi
-
-                    if [ -z "${SRCREV}" ]; then
-                        SRCREV=${ABS_BASE_SRCREV_FOR_PATH[${abs_git_root}]}
-                    fi
-
-                    git_rev_count=$(srpm_git_revision_count_list "${abs_git_root}" "${SRCREV}" $(cat "${temp_list_git_filtered}"))
-                    OTHER_GITREVCOUNT=$((OTHER_GITREVCOUNT+git_rev_count))
-                done
-
-                \rm "${temp_list}"
-                \rm "${temp_list_filtered}"
-                \rm "${temp_list_git_filtered}"
-
-            elif [[ "${varname}" =~ [^0-9] ]]; then
-                # TIS_PATCH_VER has some unsupported var or characters
-                >&2 echo "ERROR: $FUNCNAME (${LINENO}): Unsupported value in TIS_PATCH_VER: ${varname}"
-                return 1
-            fi
-        done
-
-        # Bash will expand the supported variables defined above, and perform any arithmetic,
-        # using the $((...)) syntax.
-        # So TIS_PATCH_VER=GITREVCOUNT+PKG_GITREVCOUNT+2, where:
-        # - GITREVCOUNT evaluates to 20
-        # - PKG_GITREVCOUNT evaluates to 15
-        # will result in TIS_PATCH_VER=37 when Bash evaluates the following:
-        #
-        TIS_PATCH_VER=$((TIS_PATCH_VER))
-    fi
-
-     # to avoid mockbuild error
-     PBR_VERSION=${PBR_VERSION:=NA}
-
-    return 0
-}
diff --git a/build-tools/stx/discovery.py b/build-tools/stx/discovery.py
index 3de7d489..dc4b35fd 100644
--- a/build-tools/stx/discovery.py
+++ b/build-tools/stx/discovery.py
@@ -26,7 +26,7 @@ LAYER_PRIORITY_DEFAULT = 99
 BUILD_TYPE_PRIORITY_DEFAULT = 99
 
 STX_DEFAULT_DISTRO = "debian"
-STX_DEFAULT_DISTRO_LIST = [ "debian", "centos" ]
+STX_DEFAULT_DISTRO_LIST = [ "debian" ]
 STX_DEFAULT_BUILD_TYPE = "std"
 STX_DEFAULT_BUILD_TYPE_LIST = [STX_DEFAULT_BUILD_TYPE]
 
diff --git a/build-tools/stxRpmUtils.py b/build-tools/stxRpmUtils.py
deleted file mode 100644
index f6b1d412..00000000
--- a/build-tools/stxRpmUtils.py
+++ /dev/null
@@ -1,41 +0,0 @@
-#
-# Copyright (c) 2019 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-
-#
-#  A place to collect potentially reusable python functions
-#
-
-def splitRpmFilename(filename):
-    """
-    Split an rpm filename into components:
-    package name, version, release, epoch, architecture
-    """
-
-    if filename[-4:] == '.rpm':
-        filename = filename[:-4]
-
-    idx = filename.rfind('.')
-    arch = filename[idx+1:]
-    filename = filename[:idx]
-
-    idx = filename.rfind('-')
-    rel = filename[idx+1:]
-    filename = filename[:idx]
-
-    idx = filename.rfind('-')
-    ver = filename[idx+1:]
-    filename = filename[:idx]
-
-    idx = filename.find(':')
-    if idx == -1:
-        epoch = ''
-        name = filename
-    else:
-        epoch = filename[:idx]
-        name = filename[idx+1:]
-
-    return name, ver, rel, epoch, arch
-
diff --git a/build-tools/sync-jenkins b/build-tools/sync-jenkins
deleted file mode 100755
index a5caa3ae..00000000
--- a/build-tools/sync-jenkins
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/bin/bash
-
-# This script "syncs" a local workspace up with a Jenkins build.
-#
-# NOTE - please keep this script in one file (i.e. don't break into sub-scripts
-#        or call sub-scripts from this file).  It is expected that doing so will
-#        screw things up if the sub-script gets checked out to a different
-#        version that the main script.
-#
-# The general flow of what it does is:
-#    - checks out $MY_REPO to the same commits as the Jenkins build
-#    - copies over Jenkins build artifacts in an order such that the timestamps
-#      for SRPM/RPMS artifacts make sense (RPMS have later timestamps than SRPMS)
-#
-# The user can then check out changes since the Jenkins build, and build
-# updated artifacts.  Typical use case would be
-#   $ cd $MY_WORKSPACE
-#   $ sync-jenkins --latest
-#   $ cd $MY_REPO
-#   $ wrgit checkout CGCS_DEV_0019
-#   $ cd $MY_WORKSPACE
-#   $ build-pkgs
-#
-# Usage examples:
-#    sync-jenkins --help
-#    sync-jenkins --latest
-#    sync-jenkins yow-cgts4-lx:/localdisk/loadbuild/jenkins/CGCS_3.0_Centos_Build/2016-07-24_22-00-59"
-#
-#
-# It is recommended that this tool be run with an initially empty workspace
-# (or a workspace with only the build configuration file in it).
-#
-# Potential future improvements to this script
-# - check for sane environment before doing anything
-# - auto saving of the current branch of each git, and restoration to that point
-#   after  pull
-# - filter some packages (build-info, packages that depend on LICENSE, etc) from
-#   pull
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "    sync-jenkins <--latest|--help|[path_to_jenkins_build]>"
-    echo ""
-    echo "  Examples:"
-    echo "    sync-jenkins --latest"
-    echo "    Syncs to the latest Jenkins build on yow-cgts4-lx"
-    echo ""
-    echo "    sync-jenkins yow-cgts4-lx:/localdisk/loadbuild/jenkins/CGCS_3.0_Centos_Build/2016-07-24_22-00-59"
-    echo "    Syncs to a specfic Jenkins build"
-    echo ""
-}
-
-
-# variables
-BASEDIR=$MY_REPO
-GITHASHFILE="LAST_COMMITS"
-TMPFILE="$MY_WORKSPACE/export/temp.txt"
-HELP=0
-
-TEMP=`getopt -o h --long help,latest -n 'test.sh' -- "$@"`
-
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        -h|--help) HELP=1 ; shift ;;
-        --latest) JENKINSURL="yow-cgts4-lx:/localdisk/loadbuild/jenkins/CGCS_4.0_Centos_Build/latest_build" ; shift ;;
-        --) shift ; break ;;
-    esac
-done
-
-if [ "x$JENKINSURL" == "x" ]; then
-	JENKINSURL=$@
-fi
-
-if [ $HELP -eq 1 ]; then
-	usage
-	exit 0
-fi
-
-if [ "x$JENKINSURL" == "x" ]; then
-	usage
-	exit 1
-fi
-
-mkdir -p $MY_WORKSPACE/export $MY_WORKSPACE/std/rpmbuild/RPMS $MY_WORKSPACE/std/rpmbuild/SRPMS $MY_WORKSPACE/rt/rpmbuild/RPMS $MY_WORKSPACE/rt/rpmbuild/SRPMS
-rsync $JENKINSURL/$GITHASHFILE $MY_WORKSPACE/$GITHASHFILE
-
-if [ $? -ne 0 ]; then
-    echo "Could not find $GITHASHFILE in $JENKINSURL -- aborting"
-    exit 1
-fi
-
-pushd $MY_REPO > /dev/null
-
-find . -type d -name ".git" | sed "s%/\.git$%%" > $TMPFILE
-
-while read hashfile; do
-	gitdir=`echo $hashfile | cut -d " " -f 1`
-	gitcommit=`echo $hashfile | sed s/.*[[:space:]]//g`
-	echo "doing dir $gitdir commit $gitcommit"
-	
-	pushd $gitdir >/dev/null
-	git checkout $gitcommit
-	popd
-done < $MY_WORKSPACE/$GITHASHFILE
-
-popd
-
-pushd $MY_WORKSPACE
-
-# clean stuff
-for build_type in std rt; do
-   rm -rf $MY_WORKSPACE/$build_type/rpmbuild/SRPMS
-   rm -rf $MY_WORKSPACE/$build_type/rpmbuild/RPMS
-   rm -rf $MY_WORKSPACE/$build_type/rpmbuild/inputs
-   rm -rf $MY_WORKSPACE/$build_type/rpmbuild/srpm_assemble
-done
-
-# copy source rpms from jenkins
-# Note that the order in which things are copies matters significantly.  The
-#   timestamps on files is used to determine (for example) that an SRPM is
-#   order than an RPM, and therefore the RPM does not need to be rebuilt
-for build_type in std rt; do
-   echo "Syncing $build_type build"
-   mkdir -p $MY_WORKSPACE/$build_type/rpmbuild/RPMS
-   mkdir -p $MY_WORKSPACE/$build_type/rpmbuild/SRPMS
-   rsync -r ${JENKINSURL}/$build_type/inputs $build_type/
-   sleep 1
-   rsync -r ${JENKINSURL}/$build_type/srpm_assemble $build_type/
-   sleep 1
-   rsync -r ${JENKINSURL}/$build_type/rpmbuild/SRPMS/* $MY_WORKSPACE/$build_type/rpmbuild/SRPMS
-   sleep 1
-   rsync ${JENKINSURL}/$build_type/centos-repo.last_head $MY_WORKSPACE/$build_type
-   rsync ${JENKINSURL}/$build_type/cgcs-centos-repo.last_head $MY_WORKSPACE/$build_type
-   if [ "$build_type" == "std" ]; then
-      cp $MY_WORKSPACE/$build_type/centos-repo.last_head $MY_REPO/centos-repo/.last_head
-      cp $MY_WORKSPACE/$build_type/cgcs-centos-repo.last_head $MY_REPO/cgcs-centos-repo/.last_head
-   fi
-   sleep 1
-   rsync -r ${JENKINSURL}/$build_type/results $build_type/
-   sleep 1
-   mv $build_type/results/jenkins* $build_type/results/${MY_BUILD_ENVIRONMENT}-$build_type
-   rsync -r ${JENKINSURL}/$build_type/rpmbuild/RPMS/* $MY_WORKSPACE/$build_type/rpmbuild/RPMS
-done
-
-popd
diff --git a/build-tools/sync_jenkins.sh b/build-tools/sync_jenkins.sh
deleted file mode 100755
index b6666de3..00000000
--- a/build-tools/sync_jenkins.sh
+++ /dev/null
@@ -1,145 +0,0 @@
-#!/bin/bash
-
-# This script "syncs" a local workspace up with a Jenkins build.
-#
-# The general flow of what it does is:
-#    - checks out $MY_REPO to the same commits as the Jenkins build
-#    - copies over Jenkins build artifacts in an order such that the timestamps
-#      for SRPM/RPMS artifacts make sense (RPMS have later timestamps than SRPMS)
-#
-# The user can then check out changes since the Jenkins build, and build
-# updated artifacts.  Typical use case would be
-#   $ cd $MY_WORKSPACE
-#   $ sync_jenkins.sh --latest
-#   $ cd $MY_REPO
-#   $ wrgit checkout CGCS_DEV_0017
-#   $ cd $MY_WORKSPACE
-#   $ build-pkgs
-#
-# Usage examples:
-#    sync_jenkins.sh --help
-#    sync_jenkins.sh --latest
-#    sync_jenkins.sh yow-cgts4-lx:/localdisk/loadbuild/jenkins/CGCS_3.0_Centos_Build/2016-07-24_22-00-59"
-#
-#
-# It is recommended that this tool be run with an initially empty workspace
-# (or a workspace with only the build configuration file in it).
-#
-# Potential future improvements to this script
-# - check for sane environment before doing anything
-# - auto saving of the current branch of each git, and restoration to that point
-#   after  pull
-# - filter some packages (build-info, packages that depend on LICENSE, etc) from
-#   pull
-
-usage () {
-    echo ""
-    echo "Usage: "
-    echo "    sync_jenkins.sh <--latest|--help|[path_to_jenkins_build]>"
-    echo ""
-    echo "  Examples:"
-    echo "    sync_jenkins.sh --latest"
-    echo "    Syncs to the latest Jenkins build on yow-cgts4-lx"
-    echo ""
-    echo "    sync_jenkins.sh yow-cgts4-lx:/localdisk/loadbuild/jenkins/CGCS_3.0_Centos_Build/2016-07-24_22-00-59"
-    echo "    Syncs to a specfic Jenkins build"
-    echo ""
-}
-
-
-# variables
-BASEDIR=$MY_REPO
-GITHASHFILE="LAST_COMMITS"
-TMPFILE="$MY_WORKSPACE/export/temp.txt"
-HELP=0
-
-TEMP=`getopt -o h --long help,latest -n 'test.sh' -- "$@"`
-
-if [ $? -ne 0 ]; then
-    usage
-    exit 1
-fi
-
-eval set -- "$TEMP"
-
-# extract options and their arguments into variables.
-while true ; do
-    case "$1" in
-        -h|--help) HELP=1 ; shift ;;
-        --latest) JENKINSURL="yow-cgts4-lx:/localdisk/loadbuild/jenkins/latest_dev_stream/latest_build" ; shift ;;
-        --) shift ; break ;;
-    esac
-done
-
-if [ "x$JENKINSURL" == "x" ]; then
-    JENKINSURL=$@
-fi
-
-if [ $HELP -eq 1 ]; then
-    usage
-    exit 0
-fi
-
-if [ "x$JENKINSURL" == "x" ]; then
-    usage
-    exit 1
-fi
-
-mkdir -p $MY_WORKSPACE/export $MY_WORKSPACE/std/rpmbuild/RPMS $MY_WORKSPACE/std/rpmbuild/SRPMS $MY_WORKSPACE/rt/rpmbuild/RPMS $MY_WORKSPACE/rt/rpmbuild/SRPMS
-rsync $JENKINSURL/$GITHASHFILE $MY_WORKSPACE/$GITHASHFILE
-
-if [ $? -ne 0 ]; then
-    echo "Could not find $GITHASHFILE in $JENKINSURL -- aborting"
-    exit 1
-fi
-
-pushd $MY_REPO > /dev/null
-
-find . -type d -name ".git" | sed "s%/\.git$%%" > $TMPFILE
-
-while read hashfile; do
-    gitdir=`echo $hashfile | cut -d " " -f 1`
-    gitcommit=`echo $hashfile | sed s/.*[[:space:]]//g`
-    echo "doing dir $gitdir commit $gitcommit"
-
-    pushd $gitdir >/dev/null
-    git checkout $gitcommit
-    popd
-done < $MY_WORKSPACE/$GITHASHFILE
-
-popd
-
-pushd $MY_WORKSPACE
-
-# clean stuff
-for build_type in std rt; do
-    rm -rf $MY_WORKSPACE/$build_type/rpmbuild/SRPMS
-    rm -rf $MY_WORKSPACE/$build_type/rpmbuild/RPMS
-    rm -rf $MY_WORKSPACE/$build_type/rpmbuild/inputs
-    rm -rf $MY_WORKSPACE/$build_type/rpmbuild/srpm_assemble
-done
-
-# copy source rpms from jenkins
-for build_type in std rt; do
-    mkdir -p $MY_WORKSPACE/$build_type/rpmbuild/RPMS
-    mkdir -p $MY_WORKSPACE/$build_type/rpmbuild/SRPMS
-    rsync -r ${JENKINSURL}/$build_type/inputs $build_type/
-    sleep 1
-    rsync -r ${JENKINSURL}/$build_type/srpm_assemble $build_type/
-    sleep 1
-    rsync -r ${JENKINSURL}/$build_type/rpmbuild/SRPMS/* $MY_WORKSPACE/$build_type/rpmbuild/SRPMS
-    sleep 1
-    # Some of there directories might not exist (obsolete).  Just do our best and ignore errors
-    for sub_repo in centos-repo cgcs-centos-repo local-repo cgcs-tis-repo; do
-        rsync ${JENKINSURL}/$build_type/$sub_repo.last_head $MY_WORKSPACE/$build_type
-        if [ $? -eq 0 ] && [ "$build_type" == "std" ]; then
-            cp $MY_WORKSPACE/$build_type/$sub_repo.last_head $MY_REPO/$sub_repo/.last_head
-        fi
-    done
-    sleep 1
-    rsync -r ${JENKINSURL}/$build_type/results $build_type/
-    sleep 1
-    rsync -r ${JENKINSURL}/$build_type/rpmbuild/RPMS/* $MY_WORKSPACE/$build_type/rpmbuild/RPMS
-done
-
-popd
diff --git a/build-tools/tis.macros b/build-tools/tis.macros
deleted file mode 100644
index e72ad2ed..00000000
--- a/build-tools/tis.macros
+++ /dev/null
@@ -1,11 +0,0 @@
-#
-# This file provides name=value pairs that are added to the build
-# config file as 'macros' passed into the RPM build
-#
-%__gzip=/usr/bin/pigz
-%__bzip2=/usr/bin/lbzip2
-%_patch_confdir=%{_sysconfdir}/patching
-%_patch_scripts=%{_patch_confdir}/patch-scripts
-%_runtime_patch_scripts=/run/patching/patch-scripts
-%_tis_dist=.tis
-
diff --git a/build-tools/update-efiboot-image b/build-tools/update-efiboot-image
deleted file mode 100755
index 3b61d879..00000000
--- a/build-tools/update-efiboot-image
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/bin/bash
-#
-# Copyright (c) 2016-2017 Wind River Systems, Inc.
-#
-# SPDX-License-Identifier: Apache-2.0
-#
-# Update the efiboot.img (See https://wiki.archlinux.org/index.php/Remastering_the_Install_ISO)
-# We need to mount the image file, make any changes to the filesystem, and unmount.
-#
-# e.g. udisksctl loop-setup -f efiboot.img --no-user-interaction
-#             Mapped file efiboot.img as /dev/loop0.
-#      udisksctl mount -b /dev/loop0
-#             Mounted /dev/loop0 at /run/media/kbujold/ANACONDA.
-#
-
-MY_YUM_CONF=""
-
-# Several commands may need to be executed with sudo if we're not using
-# udev.  Use a variable to hold the optional "sudo" part
-if [ 0${BUILD_ISO_USE_UDEV} -eq 1 ]; then
-    SUDOPREFIX=""
-else
-    SUDOPREFIX="sudo"
-fi
-
-function env_check {
-    for VAR_TO_CHECK in $@; do
-        if [ -z "${!VAR_TO_CHECK}" ]; then
-            echo "Required environment variable is missing: $VAR_TO_CHECK"
-            exit 1
-        fi
-    done
-}
-
-env_check MY_REPO MY_WORKSPACE BSP_FILES_PATH
-
-# Cleanup function that will release all mounts and loop devices
-function finish {
-    if [ -z "$LOOP" ] && [ ! -z "$SETUP_RET" ]; then
-        if [ 0${BUILD_ISO_USE_UDEV} -eq 1 ]; then
-            LOOP=$(echo $SETUP_RET | awk '{print $5;}' | sed -e 's/\.//g')
-        else
-            LOOP=$(echo $SETUP_RET)
-        fi
-    fi
-
-    if [ ! -z "$LOOP" ]; then
-        if [ 0${BUILD_ISO_USE_UDEV} -eq 1 ]; then
-            udisksctl unmount -b $LOOP
-        else
-            sudo umount $LOOP
-        fi
-        echo $(date) Unmounted $LOOP. $? | tee --append $MOUNT_LOG_FILE
-
-        if [ 0${BUILD_ISO_USE_UDEV} -eq 1 ]; then
-            CLEANUP_RET=$(udisksctl loop-delete -b $LOOP)
-        else
-            CLEANUP_RET=$(sudo losetup -d $LOOP)
-        fi
-        echo $(date) Released loop device $LOOP. $CLEANUP_RET | tee --append $MOUNT_LOG_FILE
-    fi
-
-
-    if [ ! -z "$EFI_MOUNT" ] && [ -d "$EFI_MOUNT" ]; then
-        ${SUDOPREFIX} rmdir $EFI_MOUNT
-        echo $(date) Deleted mount point $EFI_MOUNT | tee --append $MOUNT_LOG_FILE
-    fi
-
-}
-
-function setup_env_vars  {
-    mkdir -p $MY_WORKSPACE/export/
-
-    MY_YUM_CONF=$(create-yum-conf)
-    if [ $? -ne 0 ]; then
-       echo "ERROR: create-yum-conf failed"
-       exit 1
-    fi
-
-    DISTRO_REPO_DIR=$(for d in $(grep baseurl $MY_YUM_CONF | grep file: | awk -F : '{print $2}' | sed 's:///:/:g'); do if [ -d $d/images ]; then echo $d ;fi; done)
-
-    if [ ! -d "$DISTRO_REPO_DIR" ] ; then
-      printf "  Error -- could not access $DISTRO_REPO_DIR\n"
-      exit 1
-    fi
-
-    # where to put stuff (curent dir unless MY_WORKSPACE defined)
-    OUTPUT_DIR="$PWD/export"
-    if [ ! -z "$MY_WORKSPACE" ] && [ -d "$MY_WORKSPACE" ] ; then
-       OUTPUT_DIR="$MY_WORKSPACE/export"
-    fi
-
-    # Directory in which to populate files to be distributed
-    OUTPUT_DIST_DIR=$OUTPUT_DIR/dist
-
-    if [ ! -z "$MY_REPO" ] && [ -d "$MY_REPO" ] ; then
-      INTERNAL_REPO_ROOT=$MY_REPO
-    fi
-
-    if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      if [ ! -z "$MY_REPO_ROOT_DIR" ] && [ -d "$MY_REPO_ROOT_DIR/cgcs-root" ] ; then
-          INTERNAL_REPO_ROOT=$MY_REPO_ROOT_DIR/cgcs-root
-      fi
-    fi
-
-    if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      if [ -d "$MY_WORKSPACE/std/repo" ] ; then
-          INTERNAL_REPO_ROOT=$MY_WORKSPACE/std/repo
-      fi
-    fi
-
-    if [ -z "$INTERNAL_REPO_ROOT" ] ; then
-      printf "  Error -- could not locate cgcs-root repo.\n"
-      exit 1
-    fi
-}
-
-printf "  Calling $0\n"
-
-setup_env_vars
-
-printf "  Calling $(basename $0)\n"
-
-mkdir -p $OUTPUT_DIR
-if [ $? -ne 0 ]; then
-   printf "  Error: failed to create directory '$OUTPUT_DIR'.\n"
-   exit 1
-fi
-
-MOUNT_LOG_FILE=$OUTPUT_DIR/mounts_used.log
-touch $MOUNT_LOG_FILE
-if [ $? -ne 0 ]; then
-   printf "  Error: Failed to create log file '$MOUNT_LOG_FILE'.\n"
-   exit 1
-fi
-
-# Register our cleanup function
-trap finish EXIT
-
-# Clear old image file
-printf "  Delete old efiboot.img file\n"
-rm -f $OUTPUT_DIR/efiboot.img
-yum clean all -c $MY_YUM_CONF
-
-# Copy Vanilla Centos image file
-cp -L -u $DISTRO_REPO_DIR/images/efiboot.img $OUTPUT_DIR/
-
-printf "  Replacing the efiboot.img grub.cfg file with the Titanium Cloud one\n"
-
-# We can either use udev or sudo to mount loopback device, etc.
-# This is controlled via env variable
-
-if [ 0${BUILD_ISO_USE_UDEV} -eq 1 ]; then
-    SETUP_RET=$(udisksctl loop-setup -f $OUTPUT_DIR/efiboot.img --no-user-interaction)
-    if [ $? -ne 0 ]; then
-      printf "  Error: failed udev loop-setup command.\n"
-      exit 1
-    fi
-    LOOP=$(echo $SETUP_RET | awk '{print $5;}' | sed -e 's/\.//g')
-else
-    # no udev - use losetup command
-    # retcode is the lo device used
-    SETUP_RET=$(sudo losetup --show -f $OUTPUT_DIR/efiboot.img)
-    if [ -z "$SETUP_RET" ] ; then
-      printf "  Error: failed sudo losetup command.\n"
-      exit 1
-    fi
-
-    # Save the loop device used into a file
-    echo $(date) $SETUP_RET >> $MOUNT_LOG_FILE
-
-    LOOP=$(echo $SETUP_RET)
-    if [ -z $LOOP ] ; then
-      printf "  Error: failed losetup  command.\n"
-      exit 1
-    fi
-fi
-
-# Mount the filesystem
-if [ 0${BUILD_ISO_USE_UDEV} -eq 1 ]; then
-    udisksctl mount -b $LOOP
-    EFI_MOUNT=$(udisksctl info -b $LOOP | grep MountPoints | awk '{print $2;}')
-else
-    EFI_MOUNT=$(sudo mktemp -d -p /mnt -t EFI-noudev.XXXXXX)
-    sudo mount $LOOP $EFI_MOUNT
-fi
-
-if [ -z $EFI_MOUNT ] ; then
-  printf "  Error: failed mount command.\n"
-  exit 1
-fi
-
-# Update the vanilla UEFI Centos grub.cfg with the Titanium Cloud version
-${SUDOPREFIX} cp "$BSP_FILES_PATH/grub.cfg"  "$EFI_MOUNT/EFI/BOOT/grub.cfg"
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-# Update the grub and shim executables with the Titanium Cloud signed versions
-#
-# To do this, we extract the RPMS, grab the two executables we need, and replace
-# the ones in the current filesystem
-TMPDIR=`mktemp -d`
-SHIMPKG=`find $MY_WORKSPACE/std/rpmbuild/RPMS ${CENTOS_REPO}/Binary -name 'shim-x64-[0-9]*.x86_64.rpm'`
-if [ -z "$SHIMPKG" ]; then
-    SHIMPKG=`find $MY_WORKSPACE/std/rpmbuild/RPMS ${CENTOS_REPO}/Binary -name 'shim-[0-9]*.x86_64.rpm'`
-fi
-if [ -z "$SHIMPKG" ]; then
-    printf "  Error -- could not locate shim binary package"
-    exit 1
-fi
-
-GRUBPKG=`find $MY_WORKSPACE/std/rpmbuild/RPMS ${CENTOS_REPO}/Binary -name 'grub2-efi-x64-[0-9]*.x86_64.rpm'`
-if [ -z "$GRUBPKG" ]; then
-    GRUBPKG=`find $MY_WORKSPACE/std/rpmbuild/RPMS ${CENTOS_REPO}/Binary -name 'grub2-efi-[0-9]*.x86_64.rpm'`
-fi
-if [ -z "$GRUBPKG" ]; then
-    printf "  Error -- could not locate grub binary package"
-    exit 1
-fi
-
-pushd $TMPDIR >/dev/null
-rpm2cpio $SHIMPKG | cpio -id --quiet
-${SUDOPREFIX} find . -name "shim.efi" | xargs -I '{}' ${SUDOPREFIX} cp '{}' $EFI_MOUNT/EFI/BOOT/BOOTX64.EFI
-rm -rf *
-
-rpm2cpio $GRUBPKG | cpio -id --quiet
-${SUDOPREFIX} find . -name "grubx64.efi" | xargs -I '{}' ${SUDOPREFIX} cp '{}' $EFI_MOUNT/EFI/BOOT/grubx64.efi
-popd >/dev/null
-rm -rf $TMPDIR
-
-# Create a directory for Secure Boot certificate
-${SUDOPREFIX} mkdir -p $EFI_MOUNT/CERTS
-${SUDOPREFIX} cp $INTERNAL_REPO_ROOT/build-tools/certificates/* $EFI_MOUNT/CERTS
-
-exit 0
diff --git a/build-tools/update-pxe-network-installer b/build-tools/update-pxe-network-installer
deleted file mode 100755
index efd31e26..00000000
--- a/build-tools/update-pxe-network-installer
+++ /dev/null
@@ -1,197 +0,0 @@
-#!/bin/bash -e
-## this script is to update pxeboot images (vmlinuz, initrd.img and squashfs.img).
-## based on RPMs generated by "build-pkgs" and "build-iso"
-## created by Yong Hu (yong.hu@intel.com), 05/24/2018
-
-# For backward compatibility.  Old repo location or new?
-CENTOS_REPO=${MY_REPO}/centos-repo
-if [ ! -d ${CENTOS_REPO} ]; then
-    CENTOS_REPO=${MY_REPO}/cgcs-centos-repo
-    if [ ! -d ${CENTOS_REPO} ]; then
-        echo "ERROR: directory ${MY_REPO}/centos-repo not found."
-        exit 1
-    fi
-fi
-
-find_and_copy_rpm () {
-    local name="${1}"
-    local pattern="${2}"
-    local build_type="${3}"
-    local dest_dir="${4}"
-    local optional="${5}"
-
-    echo " --> find ${name} rpm"
-    found=$(find $MY_BUILD_DIR/${build_type}/rpmbuild/RPMS -type f -name "${pattern}" | head -n 1)
-    if [ ! -n "${found}" ];then
-        if [ "${build_type}" != "rt" ]; then
-            found=$(find ${CENTOS_REPO}/Binary -type l -name "${pattern}" | head -n 1)
-        else
-            found=$(find ${CENTOS_REPO}/${build_type}/Binary -type l -name "${pattern}" | head -n 1)
-        fi
-    fi
-
-    if [ -n "${found}" ] && [ -f "${found}" ];then
-        \cp -f "${found}" "${dest_dir}/"
-    elif [ -z "${optional}" ]; then
-        echo "ERROR: failed to find ${name} RPM!"
-        exit -1
-    fi
-}
-
-find_firmware() {
-    (
-        set -e
-        pattern="centos_firmware.inc"
-        cd $MY_REPO_ROOT_DIR
-        repo forall -c 'echo $REPO_PATH' \
-            | xargs -r -i find '{}' -mindepth 1 -maxdepth 1 -xtype f -name "$pattern" \
-            | xargs -r grep -E -v '^\s*(#.*)?$' \
-            | sort -u
-    )
-}
-
-echo "Start to update pxe-network-installer images .... "
-timestamp=$(date +%F_%H%M)
-cur_dir=$PWD
-
-pxe_network_installer_dir=$MY_BUILD_DIR/pxe-network-installer
-if [ ! -d $pxe_network_installer_dir ];then
-    mkdir -p $pxe_network_installer_dir
-fi
-
-firmware_list_file=${pxe_network_installer_dir}/firmware-list
-if [ -f ${firmware_list_file} ]; then
-    mv ${firmware_list_file} ${firmware_list_file}-bak-${timestamp}
-fi
-if [ -n "${UPDATE_FW_LIST}" ] && [ -f "${UPDATE_FW_LIST}" ]; then
-    cp -f ${UPDATE_FW_LIST} ${firmware_list_file}
-fi
-find_firmware >"${firmware_list_file}.tmp"
-if [[ -s "${firmware_list_file}.tmp" ]] ; then
-    cat "${firmware_list_file}.tmp" >>"${firmware_list_file}"
-fi
-\rm -f "${firmware_list_file}.tmp"
-if [[ -f "${firmware_list_file}" ]] ; then
-    echo "Including firmware files in installer:" >&2
-    cat "${firmware_list_file}" | sed -r 's/^/\t/' >&2
-fi
-
-cd $pxe_network_installer_dir
-
-echo "step 1: copy original images: vmlinuz, initrd.img, squashfs.img"
-orig_img_dir="orig"
-if [ ! -d $orig_img_dir ];then
-    mkdir -p $orig_img_dir
-fi
-
-orig_initrd_img="${CENTOS_REPO}/Binary/images/pxeboot/initrd.img"
-if [ -f $orig_initrd_img ]; then
-    cp -f $orig_initrd_img $pxe_network_installer_dir/$orig_img_dir/.
-else
-    echo "$orig_initrd_img does not exit"
-    exit -1
-fi
-
-orig_squashfs_img="${CENTOS_REPO}/Binary/LiveOS/squashfs.img"
-if [ -f $orig_squashfs_img ]; then
-    cp -f $orig_squashfs_img $pxe_network_installer_dir/$orig_img_dir/.
-else
-    echo "$orig_squashfs_img does not exit"
-    exit -1
-fi
-
-echo ""
-echo "step 2: prepare necessary kernel RPMs"
-echo ""
-kernel_rpms_std="$pxe_network_installer_dir/kernel-rpms/std"
-kernel_rpms_rt="$pxe_network_installer_dir/kernel-rpms/rt"
-
-echo "--> get $kernel_rpms_std ready"
-echo "--> get $kernel_rpms_rt ready"
-
-if [ -d $kernel_rpms_std ];then
-    mv $kernel_rpms_std $kernel_rpms_std-bak-$timestamp
-fi
-mkdir -p $kernel_rpms_std
-
-if [ -d $kernel_rpms_rt ];then
-    mv $kernel_rpms_rt $kernel_rpms_rt-bak-$timestamp
-fi
-mkdir -p $kernel_rpms_rt
-
-echo " -------- start to search standard kernel rpm and related kernel modules --------"
-find_and_copy_rpm 'standard kernel'                  'kernel-[0-9]*.x86_64.rpm'                     std "$kernel_rpms_std"
-find_and_copy_rpm 'standard kernel core'             'kernel-core-[0-9]*.x86_64.rpm'                std "$kernel_rpms_std"
-find_and_copy_rpm 'standard kernel modules'          'kernel-modules-[0-9]*.x86_64.rpm'             std "$kernel_rpms_std"
-find_and_copy_rpm 'standard kernel modules extra'    'kernel-modules-extra-[0-9]*.x86_64.rpm'       std "$kernel_rpms_std"
-find_and_copy_rpm 'standard kernel modules internal' 'kernel-modules-internal-[0-9]*.x86_64.rpm'    std "$kernel_rpms_std"
-find_and_copy_rpm 'e1000e kernel module'             'kmod-e1000e-[0-9]*.x86_64.rpm'                std "$kernel_rpms_std" optional
-find_and_copy_rpm 'i40e kernel module'               'kmod-i40e-[0-9]*.x86_64.rpm'                  std "$kernel_rpms_std"
-find_and_copy_rpm 'ixgbe kernel module'              'kmod-ixgbe-[0-9]*.x86_64.rpm'                 std "$kernel_rpms_std" optional
-find_and_copy_rpm 'mlnx-ofa kernel module'           'mlnx-ofa_kernel-modules-[0-9]*.x86_64.rpm'    std "$kernel_rpms_std"
-find_and_copy_rpm 'ice kernel module'                'kmod-ice-[0-9]*.x86_64.rpm'                   std "$kernel_rpms_std"
-find_and_copy_rpm 'bnxt_en kernel module'            'kmod-bnxt_en-[0-9]*.x86_64.rpm'               std "$kernel_rpms_std"
-echo " -------- successfully found standard kernel rpm and related kernel modules --------"
-echo ""
-
-echo "step 3: prepare necessary firmware RPMs"
-mkdir -p ${pxe_network_installer_dir}/firmware-rpms
-
-if [ -f "${firmware_list_file}" ]; then
-
-    firmware_rpms_std="${pxe_network_installer_dir}/firmware-rpms/std"
-    firmware_rpms_rt="${pxe_network_installer_dir}/firmware-rpms/rt"
-
-    echo "--> get ${firmware_rpms_std} ready"
-    echo "--> get ${firmware_rpms_rt} ready"
-
-    if [ -d ${firmware_rpms_std} ];then
-        mv ${firmware_rpms_std} ${firmware_rpms_std}-bak-${timestamp}
-    fi
-    mkdir -p ${firmware_rpms_std}
-
-    if [ -d ${firmware_rpms_rt} ];then
-        mv ${firmware_rpms_rt} ${firmware_rpms_rt}-bak-${timestamp}
-    fi
-    mkdir -p ${firmware_rpms_rt}
-
-    echo " -------- start to search standard firmware rpm -------"
-    find_and_copy_rpm 'standard firmware'                  'linux-firmware-[0-9]*.noarch.rpm'           std "${firmware_rpms_std}"
-    echo " -------- successfully found standard firmware rpm --------"
-    echo ""
-
-fi
-
-rootfs_rpms="$pxe_network_installer_dir/rootfs-rpms"
-if [ -d $rootfs_rpms ];then
-    mv $rootfs_rpms $rootfs_rpms-bak-$timestamp
-fi
-mkdir -p $rootfs_rpms
-
-echo "step 4:  start to search rpms for rootfs"
-find_and_copy_rpm 'anaconda'                   'anaconda-[0-9]*.x86_64.rpm'                   installer "$rootfs_rpms/."
-find_and_copy_rpm 'anaconda-core'              'anaconda-core-[0-9]*.x86_64.rpm'              installer "$rootfs_rpms/."
-find_and_copy_rpm 'anaconda-tui'               'anaconda-tui-[0-9]*.x86_64.rpm'               installer "$rootfs_rpms/."
-find_and_copy_rpm 'anaconda-widgets'           'anaconda-widgets-[0-9]*.x86_64.rpm'           installer "$rootfs_rpms/."
-find_and_copy_rpm 'rpm'                        'rpm-[0-9]*.x86_64.rpm'                        installer "$rootfs_rpms/."
-find_and_copy_rpm 'rpm-build'                  'rpm-build-[0-9]*.x86_64.rpm'                  installer "$rootfs_rpms/."
-find_and_copy_rpm 'rpm-build-libs'             'rpm-build-libs-[0-9]*.x86_64.rpm'             installer "$rootfs_rpms/."
-find_and_copy_rpm 'rpm-libs'                   'rpm-libs-[0-9]*.x86_64.rpm'                   installer "$rootfs_rpms/."
-find_and_copy_rpm 'rpm-plugin-systemd-inhibit' 'rpm-plugin-systemd-inhibit-[0-9]*.x86_64.rpm' installer "$rootfs_rpms/."
-find_and_copy_rpm 'rpm-python'                 'rpm-python-[0-9]*.x86_64.rpm'                 installer "$rootfs_rpms/."
-
-find_and_copy_rpm 'systemd'       'systemd-[0-9]*.x86_64.rpm'       std "$rootfs_rpms/."
-find_and_copy_rpm 'systemd-libs'  'systemd-libs-[0-9]*.x86_64.rpm'  std "$rootfs_rpms/."
-find_and_copy_rpm 'systemd-sysv'  'systemd-sysv-[0-9]*.x86_64.rpm'  std "$rootfs_rpms/."
-find_and_copy_rpm 'lz4'           'lz4-[0-9]*.x86_64.rpm'           std "$rootfs_rpms/."
-find_and_copy_rpm 'bind-utils'    'bind-utils-[0-9]*.x86_64.rpm'    std "$rootfs_rpms/."
-find_and_copy_rpm 'ima-evm-utils' 'ima-evm-utils-[0-9]*.x86_64.rpm' std "$rootfs_rpms/."
-echo " ---------------- successfully found rpms for rootfs --------------------------------"
-
-echo "step 5: make installer images in this work dir"
-same_folder="$(dirname ${BASH_SOURCE[0]})"
-mk_images_tool="$same_folder/make-installer-images.sh"
-sudo $mk_images_tool $pxe_network_installer_dir
-
-cd $cur_dir
-echo "updating pxe-network-installer images -- done!"
diff --git a/build-tools/url_utils.sh b/build-tools/url_utils.sh
index 640bfef0..11c8b4df 100755
--- a/build-tools/url_utils.sh
+++ b/build-tools/url_utils.sh
@@ -198,6 +198,8 @@ repo_url_to_sub_path () {
     fi
 
     # set FAMILY from URL
+    echo $URL | grep -q 'debian[.]org' && FAMILY=debian
+    echo $URL | grep -q 'mirror[.]csclub[.]uwaterloo[.]ca[/]debian-security' && FAMILY=debian
     echo $URL | grep -q 'centos[.]org' && FAMILY=centos
     echo $URL | grep -q 'fedoraproject[.]org[/]pub[/]epel' && FAMILY=epel
 
diff --git a/build-tools/wheel-utils.sh b/build-tools/wheel-utils.sh
index 044d5d2f..0dbb889e 100755
--- a/build-tools/wheel-utils.sh
+++ b/build-tools/wheel-utils.sh
@@ -17,9 +17,9 @@ source "${WHEEL_UTILS_DIR}/git-utils.sh"
 #
 # Parameters:
 #    stream:    One of 'stable', 'dev'
-#    distro:    One of 'centos', ...
+#    distro:    One of 'debian', ...
 #
-# Returns: A list of unique rpm packages that contain needed wheel
+# Returns: A list of unique packages that contain needed wheel
 #          files.  This is the union per git wheels.inc files.
 
 wheels_inc_list () {