From 5b868e5857a586ce5a91d8f74064eda66a70ba09 Mon Sep 17 00:00:00 2001 From: Monty Taylor Date: Sat, 17 Oct 2015 16:04:55 -0400 Subject: [PATCH] Retire stackforge/striker --- .gitignore | 14 - LICENSE | 176 ---- MANIFEST.in | 2 - README.rst | 30 +- doc/Makefile | 136 --- requirements.txt | 6 - run_tests.sh | 265 ----- setup.cfg | 28 - setup.py | 28 - striker/__init__.py | 14 - striker/api/__init__.py | 14 - striker/cli/__init__.py | 14 - striker/common/__init__.py | 14 - striker/common/config.py | 1274 ----------------------- striker/common/utils.py | 99 -- striker/core/__init__.py | 14 - striker/core/context.py | 123 --- striker/core/environment.py | 301 ------ test-requirements.txt | 6 - tests/__init__.py | 121 --- tests/function/__init__.py | 14 - tests/function/api/__init__.py | 14 - tests/function/cli/__init__.py | 14 - tests/unit/__init__.py | 14 - tests/unit/api/__init__.py | 14 - tests/unit/cli/__init__.py | 14 - tests/unit/common/__init__.py | 14 - tests/unit/common/test_config.py | 1472 --------------------------- tests/unit/common/test_utils.py | 123 --- tests/unit/core/__init__.py | 14 - tests/unit/core/test_context.py | 79 -- tests/unit/core/test_environment.py | 667 ------------ tox.ini | 27 - 33 files changed, 5 insertions(+), 5154 deletions(-) delete mode 100644 .gitignore delete mode 100644 LICENSE delete mode 100644 MANIFEST.in delete mode 100644 doc/Makefile delete mode 100644 requirements.txt delete mode 100755 run_tests.sh delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 striker/__init__.py delete mode 100644 striker/api/__init__.py delete mode 100644 striker/cli/__init__.py delete mode 100644 striker/common/__init__.py delete mode 100644 striker/common/config.py delete mode 100644 striker/common/utils.py delete mode 100644 striker/core/__init__.py delete mode 100644 striker/core/context.py delete mode 100644 striker/core/environment.py delete mode 100644 test-requirements.txt delete mode 100644 tests/__init__.py delete mode 100644 tests/function/__init__.py delete mode 100644 tests/function/api/__init__.py delete mode 100644 tests/function/cli/__init__.py delete mode 100644 tests/unit/__init__.py delete mode 100644 tests/unit/api/__init__.py delete mode 100644 tests/unit/cli/__init__.py delete mode 100644 tests/unit/common/__init__.py delete mode 100644 tests/unit/common/test_config.py delete mode 100644 tests/unit/common/test_utils.py delete mode 100644 tests/unit/core/__init__.py delete mode 100644 tests/unit/core/test_context.py delete mode 100644 tests/unit/core/test_environment.py delete mode 100644 tox.ini diff --git a/.gitignore b/.gitignore deleted file mode 100644 index 29d8fa2..0000000 --- a/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -/.DS_Store -/.coverage -/.tox -/.venv -/AUTHORS -/ChangeLog -/build -/cov_html -/dist -*.egg-info -*.log -*.pyc -*.swp -*.whl diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 68c771a..0000000 --- a/LICENSE +++ /dev/null @@ -1,176 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 66c0ca8..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -include requirements.txt test-requirements.txt README.rst run_tests.sh -recursive-include tests *.py diff --git a/README.rst b/README.rst index 5f0a78c..9006052 100644 --- a/README.rst +++ b/README.rst @@ -1,27 +1,7 @@ -======= -Striker -======= +This project is no longer maintained. -Striker is a deployment package builder, capable of building packages -in several different formats, running basic tests on those packages, -and distributing them. It is a tool for managing the build and -release lifecycle. +The contents of this repository are still available in the Git source code +management system. To see the contents of this repository before it reached +its end of life, please check out the previous commit with +"git checkout HEAD^1". -Packaging -========= - -Why another packaging tool? After all, the Python world already has -eggs and wheels, and they work really well, right? Well, yes and no. -A wheel (or the older egg) contains a single package and information -about its dependencies. 
Installing a full project onto a system -requires that several wheels be downloaded and installed, and we often -have dependencies on the exact versions that are used--i.e., which -exact versions have we performed acceptance tests against? Also, when -you're talking about installing a package across several thousand -machines, just downloading all those dependencies represents an -enormous network load. - -Striker is intended to help with this problem. The packages it builds -include all of the dependencies into a single artifact, which can then -be distributed to those several thousand systems in a much more -scalable fashion, such as a peer-to-peer system like BitTorrent. diff --git a/doc/Makefile b/doc/Makefile deleted file mode 100644 index 2cdd0f5..0000000 --- a/doc/Makefile +++ /dev/null @@ -1,136 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = build - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help clean html pdf dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " pdf to make pdf with rst2pdf" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -pdf: - $(SPHINXBUILD) -b pdf $(ALLSPHINXOPTS) $(BUILDDIR)/pdf - @echo - @echo "Build finished. The PDFs are in $(BUILDDIR)/pdf." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." 
- -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/NebulaDocs.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/NebulaDocs.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/NebulaDocs" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/NebulaDocs" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - make -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 0f2ba68..0000000 --- a/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -pbr>=0.6,!=0.7,<1.0 -cliff -jsonschema -PyYAML -six -stevedore diff --git a/run_tests.sh b/run_tests.sh deleted file mode 100755 index 7bde2cc..0000000 --- a/run_tests.sh +++ /dev/null @@ -1,265 +0,0 @@ -#!/bin/sh - -########################## Various utility functions ########################## - -# Canonicalize a directory name -canon_dir () { - (cd "$1" && pwd -P) -} - -# Extract the argument from "--foo=..." style arguments -get_arg () { - echo "$1" | sed 's/[^=]*=//' -} - -# Add a parameter to the set of parameters with which to invoke -# nosetests -add_params () { - if [ x"${params}" = x ]; then - params="$*" - else - params="${params} $*" - fi -} - -# An alias for the Python interpreter from the virtual environment -python () { - ${venv_path}/bin/python "$@" -} - -# An alias for pip from the virtual environment -pip () { - # Invoke using the python from the virtual environment; this works - # around spaces being present in the "#!" line - python ${venv_path}/bin/pip "$@" -} - -# An alias for pep8, using the virtual environment if requested -run_pep8 () { - if [ ${venv} = yes ]; then - # Invoke using the python from the virtual environment; this - # works around spaces being present in the "#!" 
line - python ${venv_path}/bin/pep8 "$@" - else - pep8 "$@" - fi -} - -# An alias for nosetests, using the virtual environment if requested -run_nosetests () { - if [ ${venv} = yes ]; then - # Invoke using the python from the virtual environment; this - # works around spaces being present in the "#!" line - python ${venv_path}/bin/nosetests "$@" - else - nosetests "$@" - fi -} - -# Output a usage message -usage () { - cat >&2 <] - -Execute the Striker test suite. - -Options: - -h - --help Outputs this help text. - - -V - --virtual-env - Set up and use a virtual environment for testing. - - -N - --no-virtual-env - Do not set up or use a virtual environment for testing. - - -r - --reset Resets the virtual environment prior to building it. - - -p - --pep8 Execute only the PEP8 compliance check. - - -P - --no-pep8 Do not execute the PEP8 compliance check. - - -c - --coverage Generate a coverage report - - -H - --coverage-html= - Specify the directory to contain the HTML coverage - report. - - A list of test specifications for nosetests. -EOF - - exit ${1:-1} -} - -################################ Initialization ############################### - -prog=`basename $0` -dir=`dirname $0` -dir=`canon_dir "${dir}"` - -# Initialize parameters for invoking nosetests -params= - -# Initialize other variables -venv=ask -reset=false -pep8=yes -coverage=no -cov_html=cov_html - -############################## Process arguments ############################## - -while [ $# -gt 0 ]; do - case "$1" in - -h|--help) - usage 0 2>&1 - ;; - -V|--virtual-env) - venv=yes - ;; - -N|--no-virtual-env) - venv=no - ;; - -r|--reset) - reset=true - ;; - -p|--pep8) - pep8=only - ;; - -P|--no-pep8) - pep8=no - ;; - -c|--coverage) - coverage=yes - ;; - -H|--coverage-html) - shift - cov_html="$1" - ;; - --coverage-html=*) - cov_html=`get_arg "$1"` - ;; - --*) - echo "Unrecognized option \"$1\"" >&2 - usage - ;; - *) - add_params "$1" - ;; - esac - - shift -done - -############################ Set up the environment ########################### - -# Ask if we should use a virtual environment -venv_path="${dir}/.venv" -if [ ${venv} = ask ]; then - if [ -d ${venv_path} ]; then - venv=yes - else - echo -n "No virtual environment found; create one? (Y/n) " - read use_venv - if [ "x${use_venv}" = "xY" -o "x${use_venv}" = "xy" -o \ - "x${use_venv}" = "x" ]; then - venv=yes - else - venv=no - fi - fi -fi - -# Set up the virtual environment if requested -if [ ${venv} = yes ]; then - # Reset the virtual environment if requested - if [ ${reset} = true -a -d ${venv_path} ]; then - echo "Forced reset of virtual environment" - rm -rf ${venv_path} - fi - - # Now create the virtual environment - if [ ! -d ${venv_path} ]; then - echo "Creating virtual environment" - virtualenv ${venv_path} - if [ $? -ne 0 ]; then - echo "Failed to create virtual environment" >&2 - exit 1 - fi - fi - - echo "Installing/updating requirements in virtual environment" - pip install -U -r ${dir}/requirements.txt -r ${dir}/test-requirements.txt - if [ $? -ne 0 ]; then - echo "Failed to install/update requirements in virtual environment" >&2 - exit 1 - fi - - echo "Installing striker setup in the virtual environment" - python ${dir}/setup.py install - if [ $? 
-ne 0 ]; then - echo "Failed to install striker setup in virtual environment" >&2 - exit 1 - fi - - export VIRTUAL_ENV=${venv_path} -fi - -export BASE_DIR=${dir} - -################################ Run the tests ################################ - -errors=0 -if [ ${pep8} != only ]; then - # Set up the options for nosetests - options="-v" - if [ ${coverage} = yes ]; then - options="${options} --with-coverage --cover-branches" - options="${options} --cover-package=striker" - options="${options} --cover-html --cover-html-dir=${cov_html}" - fi - - # Need to restrict tests to just the test directory - if [ x"${params}" = x ]; then - params=tests - fi - - # Run nosetests - echo - echo "Testing Python code:" - echo - run_nosetests ${options} ${params} - if [ $? -ne 0 ]; then - echo "Tests on Striker failed" >&2 - errors=`expr ${errors} + 1` - fi -fi - -# Run pep8 -if [ ${pep8} != no ]; then - echo - echo "Running PEP8 tests:" - echo - run_pep8 ${dir}/striker ${dir}/tests - if [ $? -ne 0 ]; then - echo "Pep8 compliance test failed" >&2 - errors=`expr ${errors} + 1` - fi -fi - -if [ ${errors} -gt 0 ]; then - echo - echo "Test failures encountered!" >&2 -else - echo - echo "Test suite successful!" >&2 -fi - -exit ${errors} diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index f49c27b..0000000 --- a/setup.cfg +++ /dev/null @@ -1,28 +0,0 @@ -[metadata] -name = striker -summary = Rackspace Task Runner -description-file = - README.rst -author = Rackspace Hosting -author-email = striker-dev@rackspace.com -home-page = http://rackspace.com - Development Status :: 5 Alpha - Environment :: Development - Intended Audience :: Information Technology - Intended Audience :: System Administrators - License :: OSI Approved :: Apache Software License - Operating System :: POSIX :: Linux - Programming Language :: Python - Programming Language :: Python :: 2 - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 2.6 - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.3 - Programming Language :: Python :: 3.4 - -[files] -packages = - striker - -[wheel] -universal = 1 diff --git a/setup.py b/setup.py deleted file mode 100644 index 1a71439..0000000 --- a/setup.py +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env python -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. -import setuptools - -# In python < 2.7.4, a lazy loading of package `pbr` will break -# setuptools if some other modules registered functions in `atexit`. -# solution from: http://bugs.python.org/issue15881#msg170215 -try: - import multiprocessing # noqa -except ImportError: - pass - -setuptools.setup( - setup_requires=['pbr'], - pbr=True) diff --git a/striker/__init__.py b/striker/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/striker/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. 
-# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/striker/api/__init__.py b/striker/api/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/striker/api/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/striker/cli/__init__.py b/striker/cli/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/striker/cli/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/striker/common/__init__.py b/striker/common/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/striker/common/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/striker/common/config.py b/striker/common/config.py deleted file mode 100644 index 483097a..0000000 --- a/striker/common/config.py +++ /dev/null @@ -1,1274 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. - -import collections -import copy -import functools -import glob -import inspect -import os - -import jsonschema -import six -import yaml - - -_unset = object() - - -class ConfigException(Exception): - """ - Configuration-related exceptions. - """ - - pass - - -def _schema_invalidate(child): - """ - Performs schema invalidation. This is an iterative function that - pushes a schema invalidation up to all the "parent" options. - Invalidating the schema ensures that it will be recomputed as - necessary. - - :param child: The ``Option`` instance or ``Config`` subclass for - which the schema will be invalidated. - """ - - seen = set([child]) - queue = [child] - while queue: - work = queue.pop(0) - - # Does it have a cached value? - if getattr(work, '_schema_cache', None) is None: - continue - - # Invalidate the cache - work._schema_cache = None - - # Add its parents to the queue - for parent in work._parents: - # Skip ones we've already processed - if parent in seen: - continue - - # Add the parent to the work queue - queue.append(parent) - seen.add(parent) - - -class Schema(object): - """ - Represent the special ``__schema__`` class attribute. An object - of this class is assigned to the ``__schema__`` class attribute of - the ``Config`` subclasses. When the value is requested, a - JSON-Schema representation is created and cached. - """ - - def __get__(self, obj, cls): - """ - Retrieve the schema corresponding to the given class. - - :param obj: An instance of a ``Config`` subclass. Ignored. - :param cls: The ``Config`` subclass. The schema will be - computed and cached in the class. - - :returns: The JSON-Schema dictionary describing the ``Config`` - subclass. - """ - - # Have we cached the schema yet? - if cls._schema_cache is None: - # Begin with a copy of the raw schema - schema = copy.deepcopy(cls._schema_raw) - - # Add in the description, if any - if cls.__doc__: - schema['description'] = cls.__doc__ - - # Assemble the property information - properties = {} - required = set() - for key, binding in cls._keys.items(): - # Add the schema for the option - properties[key] = binding.__schema__ - - # Is it required? - if binding.__default__ is _unset: - required.add(key) - - # Add that data to the schema - schema['properties'] = properties - schema['required'] = sorted(required) - - # Cache the final schema - cls._schema_cache = schema - - return cls._schema_cache - - def __set__(self, obj, value): - """ - Set the value of the schema. This is prohibited, so an - ``AttributeError`` is raised. - - :param obj: An instance of a ``Config`` subclass. - :param value: The new value for the schema. - """ - - raise AttributeError("cannot set read-only attribute '__schema__'") - - def __delete__(self, obj): - """ - Delete the value of the schema. This is prohibited, so an - ``AttributeError`` is raised. - - :param obj: An instance of a ``Config`` subclass. - """ - - raise AttributeError("cannot delete read-only attribute '__schema__'") - - -class Binding(object): - """ - Represent a binding between an attribute name, a key name, and an - option descriptor. 
Objects of this class are constructed by - ``ConfigMeta``, and are only used internally. A ``Binding`` is a - Python descriptor, meaning it implements a ``__get__()`` method - which performs the steps necessary to obtain a translated value - from the raw configuration. - """ - - def __init__(self, attr, key, option): - """ - Initialize a ``Binding`` object. - - :param attr: The name of the attribute the option is attached - to. - :param key: The configuration dictionary key. Under most - circumstances, this will be the same as ``attr``. - :param option: A callable, either an instance of ``Option`` or - a subclass of ``Config``. The callable will be - called with a value drawn from the - configuration, and must return the translated - value. In addition, the callable must provide - some attributes, such as ``__default__`` and - ``__schema__``. - """ - - # Store the values - self.__attr__ = attr - self.__key__ = key - self.__option__ = option - - def __call__(self, obj): - """ - Retrieve the configuration value bound to the option descriptor. - This performs memoization, for efficiency. - - :param obj: The object containing the raw configuration data - and the translation cache. - - :returns: The translated configuration data. - """ - - # Do we have a cached translation? - if self.__attr__ not in obj._xlated: - # Start with the default value - value = self.__option__.__default__ - - # See if we have a value in the configuration dictionary - if self.__key__ in obj._raw: - value = self.__option__(obj._raw[self.__key__]) - - # If we didn't find a value, raise an error - if value is _unset: - raise AttributeError( - "missing required configuration value '%s' for " - "attribute '%s'" % (self.__key__, self.__attr__)) - - # Cache the value - obj._xlated[self.__attr__] = value - - return obj._xlated[self.__attr__] - - def __getattr__(self, name): - """ - Delegate attribute retrieval to the option. This allows the - ``Binding`` object to be used as a proxy for the option - descriptor. - - :param name: The name of the attribute to retrieve. - - :returns: The value of the named attribute. - """ - - return getattr(self.__option__, name) - - def __contains__(self, name): - """ - Delegate item existence check to the option. This allows the - ``Binding`` object to be used as a proxy for the option - descriptor. - - :param name: The name of the item to check the existance of. - - :returns: A ``True`` value if the item exists, ``False`` - otherwise. - """ - - return name in self.__option__ - - def __getitem__(self, name): - """ - Delegate item retrieval to the option. This allows the - ``Binding`` object to be used as a proxy for the option - descriptor. - - :param name: The name of the item to retrieve. - - :returns: The value of the named item. - """ - - return self.__option__[name] - - def __get__(self, obj, cls): - """ - Retrieve the value of the configuration option. - - :param obj: The object containing the raw configuration data - and the translation cache. If ``None``, the - ``Binding`` instance is returned; this will proxy - for the bound option. - :param cls: The class the attribute is defined on. - - :returns: The translated configuration data. - """ - - # Return the binding if this was a class access - if obj is None: - return self - - # Instance access; return the translated configuration data - return self(obj) - - def __set__(self, obj, value): - """ - Set the value of the configuration option. This is prohibited, so - an ``AttributeError`` is raised. 
- - :param obj: The object containing the raw configuration data - and the translation cache. - :param value: The new value for the attribute. - """ - - raise AttributeError("cannot set read-only attribute '%s'" % - self.__attr__) - - def __delete__(self, obj): - """ - Delete the value of the configuration option. This is prohibited, - so an ``AttributeError`` is raised. - - :param obj: The object containing the raw configuration data - and the translation cache. - """ - - raise AttributeError("cannot delete read-only attribute '%s'" % - self.__attr__) - - -class COWDict(collections.MutableMapping): - """ - A simple copy-on-write dictionary class, structured to keep track - of a tree of dictionaries. This is used to allow a dictionary - tree to be modified arbitrarily, but for the changes to not be - applied to the original dictionary until the last moment. - """ - - def __init__(self, orig, root=None): - """ - Initialize a ``COWDict`` object. - - :param orig: The original dictionary. This dictionary will - not be modified until and unless the ``apply()`` - method is called. - :param root: The root of a dictionary tree. This is used - internally to track deeper dictionaries to which - changes must be applied. - """ - - # Set up basic value tracking - self._orig = orig - self._new = {} - self._lookaside = {} # tracks child COWDict objects - - # Keep track of root and children - self._root = root - self._children = [] - - # Update the root's list of children - if root is not None: - root._children.append(self) - - def __getitem__(self, key): - """ - Retrieve an item. - - :param key: The key to look up. - - :returns: The value of the key. - """ - - # Check if we've cached a COWDict for a dictionary value - if key in self._lookaside: - return self._lookaside[key] - - # OK, find the value - value = self._new.get(key, self._orig.get(key, _unset)) - if value is _unset: - raise KeyError(key) - - # If the value is a dictionary, create and cache a COWDict for - # it - if isinstance(value, dict): - # We use the trinary here to prevent self-references - self._lookaside[key] = self.__class__( - value, self if self._root is None else self._root) - return self._lookaside[key] - - # OK, return the value - return value - - def __setitem__(self, key, value): - """ - Set the value of an item. - - :param key: The key to set. - :param value: The value to set. - """ - - # Clear out lookaside... - self._lookaside.pop(key, None) - - # Check if we're resetting to the base value - if key in self._orig and self._orig[key] == value: - self._new.pop(key, None) - else: - self._new[key] = value - - def __delitem__(self, key): - """ - Delete the value of an item. - - :param key: The key to delete. - """ - - # Clear out lookaside... - self._lookaside.pop(key, None) - - # Do we need to mask the value? - if key in self._orig: - # Masking it - self._new[key] = _unset - else: - self._new.pop(key, None) - - def __iter__(self): - """ - Iterate over the keys in the dictionary. - - :returns: An iteration of the dictionary keys. - """ - - # Walk through the merged set of keys - for key in self._keys(): - if self._new.get(key) is _unset: - # Skip unset (deleted) keys - continue - - yield key - - def __len__(self): - """ - Calculate the number of elements in the dictionary. - - :returns: The number of elements in the dictionary. 
- """ - - # Count the merged set of keys, then subtract the number of - # deleted keys - return len(self._keys()) - list(self._new.values()).count(_unset) - - def _keys(self): - """ - Returns an unfiltered set of keys available in the original - dictionary and in our overrides. This will include deleted - keys, since they are represented as values of ``_unset``. - - :returns: A set of all keys in the original and overrides - dictionary. - """ - - return set(self._orig.keys()) | set(self._new.keys()) - - def _apply(self): - """ - Apply the changes represented by the overrides to the original - dictionary. - """ - - # Apply the changes - for key, value in self._new.items(): - if value is _unset: - self._orig.pop(key, None) - else: - self._orig[key] = value - - def apply(self): - """ - Apply the changes stored in the ``COWDict`` object to the original - dictionary tree. - """ - - # Apply to ourself first... - self._apply() - - # Now apply to the children... - for child in self._children: - child._apply() - - # Finally, clear out our stale data - self._new.clear() - self._lookaside.clear() - self._children[:] = [] - - -class Load(object): - """ - A special Python descriptor class that allows the - ``BaseConfig.load()`` method to have two different behaviors, - depending on whether it is called as a class method or an instance - method. When called as a class method, ``load()`` will load files - and return a new instance of the class; when called as an instance - method, it will load files and merge them into the configuration - instance. - """ - - def __get__(self, obj, cls): - """ - Retrieve the appropriate method to use based on how it is - accessed. If the attribute is accessed via class access, - returns the ``class_load()`` method; if accessed via instance - access, returns the ``inst_load()`` method. - - :param obj: An instance of a ``Config`` subclass. - :param cls: The ``Config`` subclass. - - :returns: The appropriate ``load()`` method to call. - """ - - # Is it class access? - if obj is None: - return functools.partial(self.class_load, cls) - - # OK, instance access - return functools.partial(self.inst_load, obj) - - @staticmethod - def _iter_files(files): - """ - A generator which iterates over a list of existing files, given a - description of the desired files. - - :param files: A list of filenames. (If a single string is - given, it will be turned into a list of one - element.) For each filename in the list, - entries which name a single file are yielded - directly; entries which name a directory result - in each file in that directory being yielded (no - recursing down subdirectories); and remaining - entries are treated as globs and any matching - files are yielded. - - :returns: An iterator over a sequence of existing file names. - Note that no attempt is made to avoid races. - """ - - # If files is not a list, wrap it in one - if isinstance(files, six.string_types): - files = [files] - - # Walk through all the files... - for fname in files: - # If it's a file, just yield it - if os.path.isfile(fname): - yield fname - - # If it's a directory, return all the files in the - # directory (sorted) - elif os.path.isdir(fname): - for entry in sorted(os.listdir(fname)): - path = os.path.join(fname, entry) - if os.path.isfile(path): - yield path - - # OK, treat it as a glob - else: - for entry in sorted(glob.glob(fname)): - if os.path.isfile(entry): - yield entry - - @staticmethod - def _merge_dict(lhs, rhs): - """ - Merges two dictionary trees into a single dictionary. 
- - :param lhs: The first dictionary to be merged. This - dictionary will be updated to contain the contents - of ``rhs``. - :param rhs: The second dictionary to be merged. This - dictionary will not be modified, but its contents - will become contents of ``lhs``. - """ - - # YAML files can create loops - seen = set([(id(lhs), id(rhs))]) - queue = [(lhs, rhs, [])] - work = [] - while queue: - # Get a work item - lhs, rhs, path = queue.pop(0) - - # Walk through all keys on rhs - for key, rh_value in rhs.items(): - if key not in lhs: - # OK, this is simple enough - lhs[key] = rh_value - continue - - # Get the lhs value - lh_value = lhs[key] - - # Is either value a dictionary? Coerce to int so we - # can use ^ on it - lh_dict = int(isinstance(lh_value, dict)) - rh_dict = int(isinstance(rh_value, dict)) - - # Need the key path - key_path = path + [key] - - # Check if the values are compatible - if (lh_dict ^ rh_dict) == 1: - raise ConfigException( - "/%s: type mismatch" % '/'.join(key_path)) - - # OK, if they're not dictionaries, apply the change - if lh_dict == 0: - lhs[key] = rh_value - else: - # Add another queue item - queue_id = (id(lh_value), id(rh_value)) - if queue_id not in seen: - queue.append((lh_value, rh_value, key_path)) - seen.add(queue_id) - - def _load(self, files, startwith=None): - """ - Load a list of YAML files. - - :param files: A list of filenames. (If a single string is - given, it will be turned into a list of one - element.) For each filename in the list, - entries which name a single file are loaded - directly; entries which name a directory result - in each file in that directory being loaded (no - recursing down subdirectories); and remaining - entries are treated as globs and any matching - files are loaded. - :param startwith: An optional starting dictionary. - - :returns: The final dictionary; if ``startswith`` is provided, - it will be that dictionary. - """ - - # Initialize the variables - final = startwith or {} - - # Iterate over the files - for fname in self._iter_files(files): - # Load the YAML file - with open(fname) as f: - raw = yaml.safe_load(f) - - # Merge its contents with what we've loaded so far - self._merge_dict(final, raw) - - return final - - def class_load(self, cls, files, validate=True): - """ - Loads one or more YAML files and returns an initialized instance - of the ``Config`` subclass. - - :param files: A list of filenames. (If a single string is - given, it will be turned into a list of one - element.) For each filename in the list, - entries which name a single file are loaded - directly; entries which name a directory result - in each file in that directory being loaded (no - recursing down subdirectories); and remaining - entries are treated as globs and any matching - files are loaded. - :param validate: If ``True`` (the default), the dictionary - value loaded from ``files`` will be - validated. - - :returns: An instance of the ``Config`` subclass containing - the loaded configuration. - """ - - # Begin by loading the files - raw = self._load(files) - - # Validate the value - if validate: - cls.validate(raw) - - # OK, instantiate the class and return it - return cls(raw) - - def inst_load(self, inst, files, validate=True): - """ - Loads one or more YAML files and updates the configuration stored - in the instance of the ``Config`` subclass. - - :param files: A list of filenames. (If a single string is - given, it will be turned into a list of one - element.) 
For each filename in the list, - entries which name a single file are loaded - directly; entries which name a directory result - in each file in that directory being loaded (no - recursing down subdirectories); and remaining - entries are treated as globs and any matching - files are loaded. - :param validate: If ``True`` (the default), the dictionary - value loaded from ``files`` will be - validated. - - :returns: Returns the instance that was updated, for - convenience. - """ - - # Begin by loading the files, using a COWDict - cow = self._load(files, COWDict(inst._raw)) - - # Validate the value - if validate: - inst.validate(cow) - - # Apply the changes - cow.apply() - - # Invalidate cached values - inst._xlated.clear() - - # Return the instance, for convenience - return inst - - -class BaseConfig(object): - """ - Base class for ``Config``. This introduces several reserved - attribute names into the ``Config`` class that are protected by - ``ConfigMeta``. - """ - - def __init__(self, value): - """ - Initialize a ``Config`` subclass. - - :param value: A dictionary containing the configuration. - """ - - # The raw configuration dictionary - self._raw = value - - # A cache containing translated values - self._xlated = {} - - @classmethod - def lookup(cls, name): - """ - Look up a ``Binding`` subclass given a name or path. - - :param name: The name of the desired ``Binding``, or a path. - If ``name`` is a simple name (i.e., not preceded - by "/"), the named attribute on this ``Config`` - subclass is returned. If ``name`` is a path - (preceded by "/", with elements separated by - "/"), the tree of options rooted at this - ``Config`` subclass is returned. Finally, - ``name`` may also be a list of path elements, - which will also result in a traversal of the tree - of options. - - :returns: An instance of ``Binding`` corresponding to the - value of ``name``. - """ - - # Do the simple tests first - if not name: - raise KeyError(name) - elif not isinstance(name, six.string_types): - # If it's just one element, look it up - if len(name) == 1: - return cls._attrs[name[0]] - - # OK, clean out any empty pieces - path = [p for p in name if p] - elif name[0] != '/': - return cls._attrs[name] - else: - # OK, split the name up - path = [p for p in name.split('/') if p] - - # Iterate down through the config tree - item = cls - for elem in path: - item = getattr(item, '_attrs', {})[elem] - - # Return the final item - return item - - @classmethod - def extend(cls, attr, option, key=None): - """ - Register a new option on the ``Config`` subclass. - - :param attr: The name of the new option. This has the same - form as the ``name`` parameter to the - ``lookup()`` method, with the restriction that - the last element of the path must not already be - defined. - :param option: A callable, either an instance of ``Option`` or - a subclass of ``Config``. The callable will be - called with a value drawn from the - configuration, and must return the translated - value. In addition, the callable must provide - some attributes, such as ``__default__`` and - ``__schema__``. - :param key: The configuration key from which the value will be - drawn. If not provided, will be the same as the - attribute name. 
- """ - - # Interpret attr - if not attr: - raise ConfigException('invalid attribute name') - elif not isinstance(attr, six.string_types): - # Clean out the path and pop off the last element as the - # final attribute name - path = [p for p in attr if p] - attr = path.pop() - elif attr[0] != '/': - path = [] - else: - # OK, split the name up - path = [p for p in attr.split('/') if p] - attr = path.pop() - - # Beware of the reserved attributes - if attr in RESERVED: - raise ConfigException("attribute '%s' is reserved; choose an " - "alternate name and use a key" % attr) - - # Determine the key name - if not key: - key = attr - - # Extend the desired option - if path: - ext_opt = cls.lookup(path) - ext_opt._extend(attr, key, option) - else: - cls._extend(attr, key, option) - - @classmethod - def _extend(cls, attr, key, option): - """ - Register a new option on the ``Config`` subclass. - - :param attr: The name of the attribute the option will be - available under. - :param key: The configuration key from which the value will be - drawn. - :param option: A callable, either an instance of ``Option`` or - a subclass of ``Config``. The callable will be - called with a value drawn from the - configuration, and must return the translated - value. In addition, the callable must provide - some attributes, such as ``__default__`` and - ``__schema__``. - """ - - # First, sanity-check that there's no overlaps - if attr in cls._attrs: - raise ConfigException("multiple definitions for attribute '%s'" % - attr) - elif key in cls._keys: - raise ConfigException("multiple definitions for configuration " - "key '%s'" % key) - - # Create a binding - binding = Binding(attr, key, option) - - # Put it in the trackers... - cls._attrs[attr] = binding - cls._keys[key] = binding - - # Add it to the class - setattr(cls, attr, binding) - - # Invalidate cached schemas - _schema_invalidate(cls) - - @classmethod - def validate(cls, value): - """ - Validates a configuration dictionary against this ``Config`` - subclass using JSON-Schema. Raises a - ``jsonschema.ValidationError`` if the configuration dictionary - is not valid. - - :param value: The configuration dictionary. - """ - - # Perform the validation - jsonschema.validate(value, cls.__schema__) - - load = Load() - - __schema__ = Schema() - - -# Configuration attributes that are reserved -RESERVED = frozenset(attr for attr in dir(BaseConfig) - if not attr.startswith('__')) - - -class ConfigMeta(type): - """ - Metaclass for ``Config``. This wraps ``Option`` instances and - ``Config`` subclasses in the class configuration into ``Binding`` - instances, and maintains mappings from attributes and - configuration value keys to those ``Binding`` instances. It also - initializes schema-related class attributes, such as - ``_schema_raw``, ``_schema_cache``, and ``_parents``. - """ - - def __new__(mcs, name, bases, namespace): - """ - Construct a ``Config`` subclass. - - :param name: The name of the ``Config`` subclass to construct. - :param bases: A tuple of base classes. - :param namespace: A dictionary containing the class - definition. - - :returns: A newly constructed ``Config`` subclass. 
- """ - - # The dictionaries mapping attributes and keys to options - attrs = {} - keys = {} - children = set() - - # Prepare the filtered namespace - filtered = { - '_attrs': attrs, - '_keys': keys, - '_schema_raw': {'type': 'object'}, - '_schema_cache': None, - '_parents': set(), - } - for attr, value in namespace.items(): - # Beware of the reserved attributes - if attr in RESERVED: - raise ConfigException("attribute '%s' is reserved; choose an " - "alternate name and use a key" % attr) - - # Treat the __schema__ attribute specially - if attr == '__schema__': - value['type'] = 'object' - filtered['_schema_raw'] = value - continue - - # Special handling for Option instances and Config - # subclasses. Note that Config cannot have any inner - # classes, as this test would blow up with a NameError - if (attr[0] != '_' and - (isinstance(value, Option) or - (inspect.isclass(value) and issubclass(value, Config)))): - # Need to update the _parents attribute later - children.add(value) - - # Derive the key - key = getattr(value, '__key__', None) or attr - - # Make sure key is valid and that there are no - # collisions - if key in keys: - raise ConfigException("multiple definitions for " - "configuration key '%s'" % key) - - # Wrap the value in a Binding - value = Binding(attr, key, value) - - # Save it in the attrs and keys dictionaries - attrs[attr] = value - keys[key] = value - - # Copy the value over into the filtered namespace - filtered[attr] = value - - # Construct the class - cls = super(ConfigMeta, mcs).__new__(mcs, name, bases, filtered) - - # Update the _parents attribute of all the child configs - for child in children: - child._parents.add(cls) - - # Return the constructed class - return cls - - -@six.add_metaclass(ConfigMeta) -class Config(BaseConfig): - """ - Configuration class. To declare a configuration, begin by - subclassing this class. Scalar options (e.g., integers, strings, - etc.) may be defined by assigning an instance of ``Option`` to an - appropriate class attribute. For dictionary options, declare an - inner class that also extends ``Config``, or create such a class - and assign it to an appropriate class attribute. (Note: for - ``Config`` subclasses, that assignment should be the class itself, - not an instance of the class.) Note that class attributes - beginning with an underscore ("_") are treated specially and - should be avoided. Also note that there are a handful of special - class methods that are not available via class instances. - - Special class attributes - - * ``__key__`` - If set on an inner class, this class attribute may be used to - override the default configuration key selection. By default, - the key associated with an inner class will be the class name; - this option allows any arbitrary key to be used. The value - will still be accessible via the normal means of accessing the - instance attribute having the name of the inner class. - - * ``__schema__`` - A dictionary containing a partial JSON-Schema dictionary. The - "type", "description", "properties", and "required" keys in - this dictionary are ignored and replaced with computed data, - with the "description" taken from the subclass docstring. Any - other values are preserved, and the constructed class will - have a ``__schema__`` property containing a complete - JSON-Schema dictionary which may be used to validate values. - Note that *instances* of the class will *not* have a - ``__schema__`` attribute or property; only the class itself - will have the final schema. 
- - Special class methods - - * lookup() - Given the name of an attribute or a path to a deeply nested - attribute, this method resolves that name to an instance of a - special private ``Binding`` class. The ``Binding`` class acts - as a proxy to the underlying ``Config`` subclass or ``Option`` - instance, but also includes the ``__key__`` and ``__attr__`` - attributes, which contain the configuration dictionary key and - the class attribute name, respectively. - - * extend() - Given the name of an attribute or a path to a deeply nested - attribute, this method installs a new ``Option`` instance or - ``Config`` subclass, giving it that name. This allows dynamic - extension of the configuration to support dynamically loaded - modules, such as command interpreters. It also dynamically - updates the ``__schema__`` class attribute. - - * validate() - Given a dictionary read from a file (typically via - ``yaml.load()``), this routine uses the ``jsonschema`` package - to validate that the dictionary conforms to the declared - configuration schema. The schema used for validation is drawn - from the ``__schema__`` class attribute. - """ - - pass - - -class Option(object): - """ - Describe a configuration option. This class is used to represent - all scalar configuration options, such as integers. - """ - - def __init__(self, default=_unset, help='', schema=None, - enum=None, key=None): - """ - Initialize an ``Option`` instance. - - :param default: The default value of the option. If none is - provided, the option will be required. - :param help: Help text describing the purpose of the option - and any other information required by the user. - Optional. - :param schema: A dictionary containing a partial JSON-Schema - dictionary. The "description", "default", and - "enum" keys in this dictionary are ignored and - replaced with computed data, with "description" - taken from the ``help`` parameter. Any other - values are preserved, and the final ``Option`` - instance will have a ``__schema__`` instance - attribute containing a complete JSON-Schema - dictionary which may be used to validate - values. Optional. - :param enum: A list of legal values for the option to take. - If not provided, the values that may be given are - only constrained by the declared ``schema`` for - the option. - :param key: The name of the configuration dictionary key - corresponding to the option. By default, this is - the name of the attribute to which the ``Option`` - instance is assigned. - """ - - self.__default__ = default - self.__doc__ = help - - if key: - # Only set __key__ if one is given - self.__key__ = key - - # Compute the schema - self._schema_raw = schema or {} - - # Set up default and description - if default is not _unset: - self._schema_raw['default'] = default - if help: - self._schema_raw['description'] = help - - # Include enumerated values, if specified - if enum: - self._schema_raw['enum'] = enum - - # Initialize the parents set - self._parents = set() - - def __call__(self, value): - """ - Translate the raw configuration value into the internal - representation. For scalar options described by an - ``Option``, this internal representation will be identical to - the raw configuration value. - - :param value: The raw configuration value. - - :returns: The internal representation. - """ - - return value - - def _extend(self, attr, key, option): - """ - For the ``Config`` subclasses, the ``_extend()`` method is a class - method that registers a new option. 
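A minimal end-to-end sketch of the declaration, validation, and lookup workflow described above, assuming the package from this patch (and PyYAML) is importable; UploadConfig and its values are invented.

import yaml

from striker.common import config


class UploadConfig(config.Config):
    """Settings for a hypothetical artifact upload step."""

    container = config.Option(help='Target container name')   # no default: required
    region = config.Option(default='DFW', enum=['DFW', 'ORD', 'IAD'],
                           help='Upload region')
    attempts = config.Option(default=3, schema={'type': 'integer'},
                             help='Number of upload attempts')


raw = yaml.safe_load("container: build-artifacts\nregion: ORD\n")
UploadConfig.validate(raw)        # raises jsonschema.ValidationError if invalid

cfg = UploadConfig(raw)
print(cfg.container, cfg.region, cfg.attempts)    # build-artifacts ORD 3

# lookup() resolves an attribute name (or a "/"-separated path into nested
# configurations) to the private Binding proxy.
binding = UploadConfig.lookup('attempts')
print(binding.__attr__, binding.__key__)          # attempts attempts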
This is meaningless for - ``Option`` instances, so this implementation raises a - ``ConfigException`` to highlight those cases. - - :param attr: The name of the attribute the option will be - available under. - :param key: The configuration key from which the value will be - drawn. - :param option: A callable, either an instance of ``Option`` or - a subclass of ``Config``. The callable will be - called with a value drawn from the - configuration, and must return the translated - value. In addition, the callable must provide - some attributes, such as ``__default__`` and - ``__schema__``. - """ - - raise ConfigException("options cannot be extended") - - def validate(self, value): - """ - Validates a configuration dictionary against this ``Option`` - instance using JSON-Schema. Raises a - ``jsonschema.ValidationError`` if the configuration dictionary - is not valid. - - :param value: The configuration dictionary. - """ - - # Perform the validation - jsonschema.validate(value, self.__schema__) - - @property - def __schema__(self): - """ - Retrieve the schema for the option. - """ - - return self._schema_raw - - -class ListOption(Option): - """ - Describe a configuration option taking a list value. This may be - used with "list"-style values, where each item has the same - schema, or it may be used with "tuple"-style values, where each - item has a distinct schema that applies only to it. - """ - - def __init__(self, default=_unset, help='', schema=None, - items=None, key=None): - """ - Initialize a ``ListOption`` instance. - - :param default: The default value of the option. If none is - provided, the option will be required. - :param help: Help text describing the purpose of the option - and any other information required by the user. - Optional. - :param schema: A dictionary containing a partial JSON-Schema - dictionary. The "type", "description", - "default", and "items" keys in this dictionary - are ignored and replaced with computed data, - with "description" taken from the ``help`` - parameter and "items" taken from the ``items`` - parameter. Any other values are preserved, and - the final ``Option`` instance will have a - ``__schema__`` instance attribute containing a - complete JSON-Schema dictionary which may be - used to validate values. Optional. - :param items: May be either a single option description or a - sequence of such descriptions. (Here an "option - description" consists of either an ``Option`` - instance or a ``Config`` subclass.) If this is - a single option description, the option - description is applied to all elements of the - list in the configuration; if it is a sequence, - the elements in the sequence will be applied to - the corresponding element of the list in the - configuration. (The first is described as - "list" mode, and the second is described as - "tuple" mode.) - :param key: The name of the configuration dictionary key - corresponding to the option. By default, this is - the name of the attribute to which the ``Option`` - instance is assigned. 
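A hedged sketch of the two ListOption modes described above; JobConfig and its options are invented, and the package from this patch is assumed importable.

from striker.common import config


class JobConfig(config.Config):
    """Settings for a hypothetical build job."""

    # "list" mode: one option description applied to every element.
    tags = config.ListOption(default=[], help='Arbitrary job tags',
                             items=config.Option(help='A tag'))

    # "tuple" mode: one option description per position.
    window = config.ListOption(
        help='Allowed start/end hours',
        items=[config.Option(help='Start hour', schema={'type': 'integer'}),
               config.Option(help='End hour', schema={'type': 'integer'})])


print(JobConfig.__schema__['properties']['window']['items'])
# [{'type': 'integer', 'description': 'Start hour'},
#  {'type': 'integer', 'description': 'End hour'}]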
- """ - - # Initialize the superclass - super(ListOption, self).__init__( - default=default, help=help, schema=schema, key=key) - - # Update the schema type - self._schema_raw['type'] = 'array' - - # Determine the interface mode, normalize the items, update - # parent sets, and create an appropriate _attrs dictionary for - # the lookup() algorithm - if items is None: - self._mode = 'noxlate' - self._items = None - self._attrs = {} - else: - if isinstance(items, collections.Sequence): - # Keep track of the mode, too - self._mode = 'tuple' - self._items = [] - self._attrs = {} - for idx, item in enumerate(items): - if item: - item._parents.add(self) - self._attrs['[%d]' % idx] = item - self._items.append(item or None) - else: - # We're a simple list - self._mode = 'list' - self._items = items - self._attrs = {'[]': items} - items._parents.add(self) - - # Prepare a schema cache - self._schema_cache = None - - def __call__(self, value): - """ - Translate the raw configuration value into the internal - representation. For list options, the option descriptions - passed to the ``items`` tuple will control the translation of - list items. - - :param value: The raw configuration value. - - :returns: The internal representation. - """ - - if self._mode == 'noxlate': - # Return the value unchanged - return value - elif self._mode == 'list': - # For a simple list, convert each value - return [self._items(v) for v in value] - else: - # For a tuple, convert all the items for which we have a - # value - result = [] - for idx, val in enumerate(value): - result.append(self._items[idx](val) if idx < len(self._items) - else val) - return result - - @property - def __schema__(self): - """ - Retrieve the schema for the option. - """ - - # Have we cached the schema yet? - if self._schema_cache is None: - # Begin with a copy of the raw schema - schema = copy.deepcopy(self._schema_raw) - - # Most of the schema has been initialized; we just need to - # assemble the property information - if self._mode == 'list': - schema['items'] = self._items.__schema__ - elif self._mode == 'tuple': - schema['items'] = [item.__schema__ if item else {} - for item in self._items] - # In the noxlate case, we don't set 'items' - - # Cache the final schema - self._schema_cache = schema - - return self._schema_cache diff --git a/striker/common/utils.py b/striker/common/utils.py deleted file mode 100644 index cecb7d7..0000000 --- a/striker/common/utils.py +++ /dev/null @@ -1,99 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. - -import os -import time - -import six - - -def canonicalize_path(cwd, path): - """ - Canonicalizes a path relative to a given working directory. - - :param cwd: The working directory to interpret ``path`` relative - to. - :param path: The path to canonicalize. If relative, it will be - interpreted relative to ``cwd``. - - :returns: The absolute path. 
- """ - - if not os.path.isabs(path): - path = os.path.join(cwd, path) - - return os.path.abspath(path) - - -def backoff(max_tries): - """ - A generator to perform simplified exponential backoff. Yields up - to the specified number of times, performing a ``time.sleep()`` - with an exponentially increasing sleep time (starting at 1 second) - between each trial. Yields the (0-based) trial number. - - :param max_tries: The maximum number of tries to attempt. - """ - - # How much time will we sleep next time? - sleep = 1 - - for i in range(max_tries): - # Yield the trial number - yield i - - # We've re-entered the loop; sleep, then increment the sleep - # time - time.sleep(sleep) - sleep <<= 1 - - -def boolean(value, default=None): - """ - Convert a string value into a boolean. The values 'true', 't', - 'yes', 'y', and 'on', as well as non-zero integer values, are - recognized as ``True``, while the values 'false', 'f', 'no', 'n', - and 'off', as well as the integer value 0, are recognized as - ``False``. A ``ValueError`` is raised for other values unless the - ``default`` parameter is given, in which case it is returned. - - :param value: The string value to be converted to boolean. - :param default: If not ``None``, specifies the desired default - value if the ``value`` is not one of the - recognized values. - - :returns: The boolean value derived from the string. - """ - - # Cover non-string case - if not isinstance(value, six.string_types): - return bool(value) - - # Cover the integer case - if value.isdigit(): - return bool(int(value)) - - # Check for recognized values - tmp = value.lower() - if tmp in ('true', 't', 'yes', 'y', 'on'): - return True - elif tmp in ('false', 'f', 'no', 'n', 'off'): - return False - - # Return the default value - if default is not None: - return default - - raise ValueError('invalid boolean literal %r' % value) diff --git a/striker/core/__init__.py b/striker/core/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/striker/core/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/striker/core/context.py b/striker/core/context.py deleted file mode 100644 index 4eb7471..0000000 --- a/striker/core/context.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. - -from striker.core import environment - - -class Context(object): - """ - Execution context. 
Objects of this class contain all the basic - configuration data needed to perform a task. - """ - - def __init__(self, workspace, config, logger, - debug=False, dry_run=False, **extras): - """ - Initialize a ``Context`` object. - - :param workspace: The name of a temporary working directory. - The directory must exist. - :param config: An object containing configuration data. The - object should support read-only attribute-style - access to the configuration settings. - :param logger: An object compatible with ``logging.Logger``. - This will be used by all consumers of the - ``Context`` to emit logging information. - :param debug: A boolean, defaulting to ``False``, indicating - whether debugging mode is active. - :param dry_run: A boolean, defaulting to ``False``, indicating - whether permanent changes should be effected. - This should be used to control whether files - are uploaded, for instance. - :param extras: Keyword arguments specifying additional data to - be stored in the context. This could be, for - instance, account data. - """ - - # Store basic context data - self.workspace = workspace - self.config = config - self.logger = logger - self.debug = debug - self.dry_run = dry_run - - # Extra data--things like accounts - self._extras = extras - - # Environment - self._environ = None - - def __getattr__(self, name): - """ - Provides access to the extra data specified to the constructor. - - :param name: The name of the extra datum to retrieve. - - :returns: The value of the extra datum. - """ - - if name not in self._extras: - raise AttributeError("'%s' object has no attribute '%s'" % - (self.__class__.__name__, name)) - - return self._extras[name] - - @property - def environ(self): - """ - Access the environment. The environment is a dictionary of - environment variables, but it is also a callable that can be - used to invoke shell commands. - - :param cmd: The command to execute, as either a bare string or - a list of arguments. If a string, it will be - split into a list using ``shlex.split()``. Note - that use of bare strings for this argument is - discouraged. - :param capture_output: If ``True``, standard input and output - will be captured, and will be available - in the result. Defaults to ``False``. - Note that this is treated as implicitly - ``True`` if the ``retry`` parameter is - provided. - :param cwd: Gives an alternate working directory from which to - run the command. - :param do_raise: If ``True`` (the default), an execution - failure will raise an exception. - :param retry: If provided, must be a callable taking one - argument. Will be called with an instance of - ``ExecResult``, and can return ``True`` to - indicate that the call should be retried. - Retries are performed with an exponential - backoff controlled by ``max_tries``. - :param max_tries: The maximum number of tries to perform - before giving up, if ``retry`` is specified. - Retries are performed with an exponential - backoff: the first try is performed - immediately, and subsequent tries occur - after a sleep time that starts at one second - and is doubled for each try. - - :returns: An ``ExecResult`` object containing the results of - the execution. If the return code was non-zero and - ``do_raise`` is ``True``, this is the object that - will be raised. 
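A hedged sketch of assembling a Context as described above; TaskConfig, the workspace, and the account value are invented, and the modules from this patch are assumed importable.

import logging
import tempfile

from striker.common import config
from striker.core import context


class TaskConfig(config.Config):
    """Configuration for a hypothetical task."""

    target = config.Option(default='staging', help='Deployment target')


logger = logging.getLogger('striker.demo')
ctxt = context.Context(workspace=tempfile.mkdtemp(),   # must already exist
                       config=TaskConfig({}),
                       logger=logger,
                       dry_run=True,
                       account='svc-build')             # extra datum

print(ctxt.config.target)   # staging
print(ctxt.account)         # svc-build, served from the extras via __getattr__
print(ctxt.dry_run)         # True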
- """ - - # Construct the environment if necessary - if self._environ is None: - self._environ = environment.Environment(self.logger) - - return self._environ diff --git a/striker/core/environment.py b/striker/core/environment.py deleted file mode 100644 index d43e7ee..0000000 --- a/striker/core/environment.py +++ /dev/null @@ -1,301 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. - -import os -import shlex -import shutil -import subprocess - -import six - -from striker.common import utils - - -class ExecResult(Exception): - """ - Encapsulate the results of calling a command. This class extends - ``Exception`` so that it can be raised in the event of a command - failure. The command executed is available in both list (``cmd``) - and plain text (``cmd_text``) forms. If the command is executed - with ``capture_output``, the standard output (``stdout``) and - standard error (``stderr``) streams will also be available. The - command return code is available in the ``return_code`` attribute. - """ - - def __init__(self, cmd, stdout, stderr, return_code): - """ - Initialize an ``ExecResult``. - - :param cmd: The command, in list format. - :param stdout: The standard output from the command execution. - :param stderr: The standard error from the command execution. - :param return_code: The return code from executing the - command. - """ - - # Store all the data - self.cmd = cmd - self.stdout = stdout - self.stderr = stderr - self.return_code = return_code - - # Form the command text - comps = [] - for comp in cmd: - # Determine if the component needs quoting - if ' ' in comp or '"' in comp or "'" in comp: - # Escape any double-quotes - parts = comp.split('"') - comp = '"%s"' % '\\"'.join(parts) - - comps.append(comp) - - # Save the command text - self.cmd_text = ' '.join(comps) - - # Formulate the message - if return_code: - msg = ("'%s' failed with return code %s" % - (self.cmd_text, return_code)) - elif stderr: - msg = "'%s' said: %s" % (self.cmd_text, stderr) - elif stdout: - msg = "'%s' said: %s" % (self.cmd_text, stdout) - else: - msg = "'%s' succeeded" % (self.cmd_text,) - - # Initialize ourselves as an exception - super(ExecResult, self).__init__(msg) - - def __nonzero__(self): - """ - Allows conversion of an ``ExecResult`` to boolean, based on - the command return code. If the return code was 0, the object - will be considered ``True``; otherwise, the object will be - considered ``False``. - - :returns: ``True`` if the command succeeded, ``False`` - otherwise. - """ - - return not bool(self.return_code) - __bool__ = __nonzero__ - - -class Environment(dict): - """ - Describes an environment that can be used for execution of - subprocesses. Virtual environments can be created by calling the - ``create_venv()`` method, which returns an independent instance of - ``Environment``. - """ - - def __init__(self, logger, environ=None, cwd=None, venv_home=None): - """ - Initialize a new ``Environment``. 
- - :param logger: An object compatible with ``logging.Logger``. - This will be used to emit logging information. - :param environ: A dictionary containing the environment - variables. If not given, ``os.environ`` will - be used. - :param cwd: The working directory to use. If relative, will - be interpreted relative to the current working - directory. If not given, the current working - directory will be used. - :param venv_home: The home directory for the virtual - environment. - """ - - super(Environment, self).__init__(environ or os.environ) - - # Save the logger - self.logger = logger - - # Change to the desired working directory, then save the full - # path to it - self.cwd = os.getcwd() - if cwd: - self.chdir(cwd) - - # Save the virtual environment home - self.venv_home = venv_home - - def __call__(self, cmd, capture_output=False, cwd=None, do_raise=True, - retry=None, max_tries=5): - """ - Execute a command in the context of this environment. - - :param cmd: The command to execute, as either a bare string or - a list of arguments. If a string, it will be - split into a list using ``shlex.split()``. Note - that use of bare strings for this argument is - discouraged. - :param capture_output: If ``True``, standard input and output - will be captured, and will be available - in the result. Defaults to ``False``. - Note that this is treated as implicitly - ``True`` if the ``retry`` parameter is - provided. - :param cwd: Gives an alternate working directory from which to - run the command. - :param do_raise: If ``True`` (the default), an execution - failure will raise an exception. - :param retry: If provided, must be a callable taking one - argument. Will be called with an instance of - ``ExecResult``, and can return ``True`` to - indicate that the call should be retried. - Retries are performed with an exponential - backoff controlled by ``max_tries``. - :param max_tries: The maximum number of tries to perform - before giving up, if ``retry`` is specified. - Retries are performed with an exponential - backoff: the first try is performed - immediately, and subsequent tries occur - after a sleep time that starts at one second - and is doubled for each try. - - :returns: An ``ExecResult`` object containing the results of - the execution. If the return code was non-zero and - ``do_raise`` is ``True``, this is the object that - will be raised. 
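A hedged sketch of invoking commands through the Environment above; the commands and retry policy are invented, and the module from this patch is assumed importable.

import logging

from striker.core import environment

logger = logging.getLogger('striker.demo')
env = environment.Environment(logger, cwd='/tmp')

# Capture output explicitly; the ExecResult evaluates True on success.
result = env(['uname', '-a'], capture_output=True)
print(bool(result), result.stdout)

# Retry a failing command up to three times (sleeping 1s, then 2s); with
# do_raise=False the failing ExecResult is returned instead of raised.
result = env('ls /nonexistent',
             retry=lambda res: res.return_code != 0,
             max_tries=3, do_raise=False)
print(result.return_code)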
- """ - - # Sanity-check arguments - if not retry or max_tries < 1: - max_tries = 1 - - # Determine the working directory to use - cwd = utils.canonicalize_path(self.cwd, cwd) if cwd else self.cwd - - # Turn simple strings into lists of tokens - if isinstance(cmd, six.string_types): - self.logger.debug("Notice: splitting command string '%s'" % - cmd) - cmd = shlex.split(cmd) - - self.logger.debug("Executing command: %r (cwd %s)" % (cmd, cwd)) - - # Prepare the keyword arguments for the Popen call - kwargs = { - 'env': self, - 'cwd': cwd, - 'close_fds': True, - } - - # Set up stdout and stderr - if capture_output or (retry and max_tries > 1): - kwargs.update({ - 'stdout': subprocess.PIPE, - 'stderr': subprocess.PIPE, - }) - - # Perform the tries in a loop - for trial in utils.backoff(max_tries): - if trial: - self.logger.warn("Failure caught; retrying command " - "(try #%d)" % (trial + 1)) - - # Call the command - child = subprocess.Popen(cmd, **kwargs) - stdout, stderr = child.communicate() - result = ExecResult(cmd, stdout, stderr, child.returncode) - - # Check if we need to retry - if retry and not result and retry(result): - continue - - break - else: - # Just log a warning that we couldn't retry - self.logger.warn("Unable to retry: too many attempts") - - # Raise an exception if requested - if not result and do_raise: - raise result - - return result - - def chdir(self, path): - """ - Change the working directory. - - :param path: The path to change to. If relative, will be - interpreted relative to the current working - directory. - - :returns: The new working directory. - """ - - self.cwd = utils.canonicalize_path(self.cwd, path) - - return self.cwd - - def create_venv(self, path, rebuild=False, **kwargs): - """ - Create a new, bare virtual environment rooted at the given - directory. No packages will be installed, except what - ``virtualenv`` installs. Returns a new ``Environment`` set up - for the new virtual environment, with the working directory - set to be the same as the virtual environment directory. Any - keyword arguments will override system environment variables - in the new ``Environment`` object. - - :param path: The path to create the virtual environment in. - If relative, will be interpreted relative to the - current working directory. - :param rebuild: If ``True``, the virtual environment will be - rebuilt even if it already exists. If - ``False`` (the default), the virtual - environment will only be rebuilt if it doesn't - already exist. - :returns: A new ``Environment`` object. 
- """ - - # Determine the new virtual environment path - path = utils.canonicalize_path(self.cwd, path) - - self.logger.debug("Preparing virtual environment %s" % path) - - # Check if we need to rebuild the virtual environment - if os.path.exists(path): - if rebuild: - # Blow away the old tree - self.logger.info("Destroying old virtual environment %s" % - path) - shutil.rmtree(path) - else: - self.logger.info("Using existing virtual environment %s" % - path) - else: - # We'll need to create it - rebuild = True - - # Create the new virtual environment - if rebuild: - self.logger.info("Creating virtual environment %s" % path) - self(['virtualenv', path]) - - # Set up the environment variables that are needed - kwargs.setdefault('VIRTUAL_ENV', path) - bindir = os.path.join(path, 'bin') - kwargs.setdefault('PATH', '%s%s%s' % - (bindir, os.pathsep, self['PATH'])) - - # Set up and return the new Environment - new_env = self.__class__(self.logger, environ=self, cwd=path, - venv_home=path) - new_env.update(kwargs) - return new_env diff --git a/test-requirements.txt b/test-requirements.txt deleted file mode 100644 index 9bb6cda..0000000 --- a/test-requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -coverage -mock -nose -pep8 -sphinx>=1.1.2,!=1.2.0,<1.3 -tox diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index a95d851..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. - -import six - - -class TestException(Exception): - """ - An exception for the use of tests. - """ - - pass - - -def fake_join(a, *p): - """ - Lifted from the POSIX implementation of os.path, for testing - purposes. - """ - - path = a - for b in p: - if b.startswith('/'): - path = b - elif path == '' or path.endswith('/'): - path += b - else: - path += '/' + b - return path - - -def fake_isabs(s): - """ - Lifted from the POSIX implementation of os.path, for testing - purposes. - """ - - return s.startswith('/') - - -def fake_abspath(path): - """ - Lifted from the POSIX implementation of os.path, for testing - purposes. - """ - - if not fake_isabs(path): - if six.PY2 and isinstance(path, unicode): - cwd = os.getcwdu() - elif six.PY3 and isinstance(path, bytes): - cwd = os.getcwdb() - else: - cwd = os.getcwd() - path = fake_join(cwd, path) - return fake_normpath(path) - - -def fake_normpath(path): - """ - Lifted from the POSIX implementation of os.path, for testing - purposes. - """ - - if six.PY2 and isinstance(path, unicode): - sep = u'/' - empty = u'' - dot = u'.' - dotdot = u'..' - elif six.PY3 and isinstance(path, bytes): - sep = b'/' - empty = b'' - dot = b'.' - dotdot = b'..' - else: - sep = '/' - empty = '' - dot = '.' - dotdot = '..' - - if path == empty: - return dot - - initial_slashes = path.startswith(sep) - - # POSIX allows one or two initial slashes, but treats three or more - # as single slash. 
- if (initial_slashes and - path.startswith(sep * 2) and not path.startswith(sep * 3)): - initial_slashes = 2 - - comps = path.split(sep) - new_comps = [] - for comp in comps: - if comp in (empty, dot): - continue - - if (comp != dotdot or (not initial_slashes and not new_comps) or - (new_comps and new_comps[-1] == dotdot)): - new_comps.append(comp) - elif new_comps: - new_comps.pop() - - comps = new_comps - path = sep.join(comps) - - if initial_slashes: - path = sep * initial_slashes + path - - return path or dot diff --git a/tests/function/__init__.py b/tests/function/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/function/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/function/api/__init__.py b/tests/function/api/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/function/api/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/function/cli/__init__.py b/tests/function/cli/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/function/cli/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/unit/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/unit/api/__init__.py b/tests/unit/api/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/unit/api/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/unit/cli/__init__.py b/tests/unit/cli/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/unit/cli/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/unit/common/__init__.py b/tests/unit/common/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/unit/common/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/unit/common/test_config.py b/tests/unit/common/test_config.py deleted file mode 100644 index bbfd07b..0000000 --- a/tests/unit/common/test_config.py +++ /dev/null @@ -1,1472 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. 
See the License for the specific language -# governing permissions and limitations under the License. - -import collections -import contextlib -import fnmatch -import unittest - -import mock -from six.moves import builtins - -from striker.common import config - -import tests - - -class SchemaInvalidateTest(unittest.TestCase): - def test_base(self): - parent1 = mock.Mock(spec=['_parents', '_schema_cache'], - _schema_cache='parent1_cached', - _parents=set(['foo'])) - parent2 = mock.Mock(spec=['_parents'], _parents=set([parent1])) - parent3 = mock.Mock(spec=['_schema_cache', '_parents'], - _schema_cache='parent3_cached', _parents=set()) - parent4 = mock.Mock(spec=['_schema_cache', '_parents'], - _schema_cache='parent4_cached', - _parents=set([parent2])) - child = mock.Mock(spec=['_parents', '_schema_cache'], - _schema_cache='child_cached', - _parents=set([parent4, parent3])) - - config._schema_invalidate(child) - - self.assertEqual(child._schema_cache, None) - self.assertEqual(parent1._schema_cache, 'parent1_cached') - self.assertEqual(parent3._schema_cache, None) - self.assertEqual(parent4._schema_cache, None) - - def test_parent_loop(self): - # Should never happen, but just in case... - parent1 = mock.Mock(spec=['_parents', '_schema_cache'], - _schema_cache='parent1_cached', - _parents=set()) - parent2 = mock.Mock(spec=['_parents', '_schema_cache'], - _schema_cache='parent2_cached', - _parents=set([parent1])) - parent1._parents.add(parent2) - child = mock.Mock(spec=['_parents', '_schema_cache'], - _schema_cache='child_cached', - _parents=set([parent2])) - - config._schema_invalidate(child) - - self.assertEqual(child._schema_cache, None) - self.assertEqual(parent1._schema_cache, None) - self.assertEqual(parent2._schema_cache, None) - - -class SchemaTest(unittest.TestCase): - def test_get_cached(self): - cls = mock.Mock( - __doc__='', - _keys={}, - _schema_raw={}, - _schema_cache='cached', - ) - schema = config.Schema() - - result = schema.__get__(None, cls) - - self.assertEqual(result, 'cached') - self.assertEqual(cls._schema_cache, 'cached') - - def test_get_uncached(self): - opt1 = mock.Mock( - spec=config.Option, - _parents=set(), - __schema__='opt1_sch', - __default__='default', - ) - opt2 = mock.Mock( - spec=config.Option, - _parents=set(), - __schema__='opt2_sch', - __default__=config._unset, - ) - cls = mock.Mock( - __doc__='description', - _keys={ - 'option1': opt1, - 'option2': opt2, - }, - _schema_raw={'extra': 'data'}, - _schema_cache=None, - ) - schema = config.Schema() - - result = schema.__get__(None, cls) - - expected = { - 'description': 'description', - 'extra': 'data', - 'properties': { - 'option1': 'opt1_sch', - 'option2': 'opt2_sch', - }, - 'required': ['option2'], - } - self.assertEqual(result, expected) - self.assertEqual(cls._schema_cache, expected) - - def test_get_uncached_nodoc(self): - opt1 = mock.Mock( - spec=config.Option, - _parents=set(), - __schema__='opt1_sch', - __default__='default', - ) - opt2 = mock.Mock( - spec=config.Option, - _parents=set(), - __schema__='opt2_sch', - __default__=config._unset, - ) - cls = mock.Mock( - __doc__=None, - _keys={ - 'option1': opt1, - 'option2': opt2, - }, - _schema_raw={'extra': 'data'}, - _schema_cache=None, - ) - schema = config.Schema() - - result = schema.__get__(None, cls) - - expected = { - 'extra': 'data', - 'properties': { - 'option1': 'opt1_sch', - 'option2': 'opt2_sch', - }, - 'required': ['option2'], - } - self.assertEqual(result, expected) - self.assertEqual(cls._schema_cache, expected) - - def test_set(self): 
- schema = config.Schema() - - self.assertRaises(AttributeError, schema.__set__, 'obj', 'value') - - def test_delete(self): - schema = config.Schema() - - self.assertRaises(AttributeError, schema.__delete__, 'obj') - - -class BindingTest(unittest.TestCase): - def test_init(self): - binding = config.Binding('attr', 'key', 'option') - - self.assertEqual(binding.__attr__, 'attr') - self.assertEqual(binding.__key__, 'key') - self.assertEqual(binding.__option__, 'option') - - def test_call_cached(self): - option = mock.Mock(return_value='converted', __default__='default') - obj = mock.Mock(_raw={'key': 'config'}, _xlated={'attr': 'cached'}) - binding = config.Binding('attr', 'key', option) - - result = binding(obj) - - self.assertEqual(result, 'cached') - self.assertEqual(obj._raw, {'key': 'config'}) - self.assertEqual(obj._xlated, {'attr': 'cached'}) - self.assertFalse(option.called) - - def test_call_translate(self): - option = mock.Mock(return_value='converted', __default__='default') - obj = mock.Mock(_raw={'key': 'config'}, _xlated={}) - binding = config.Binding('attr', 'key', option) - - result = binding(obj) - - self.assertEqual(result, 'converted') - self.assertEqual(obj._raw, {'key': 'config'}) - self.assertEqual(obj._xlated, {'attr': 'converted'}) - option.assert_called_once_with('config') - - def test_call_default(self): - option = mock.Mock(return_value='converted', __default__='default') - obj = mock.Mock(_raw={}, _xlated={}) - binding = config.Binding('attr', 'key', option) - - result = binding(obj) - - self.assertEqual(result, 'default') - self.assertEqual(obj._raw, {}) - self.assertEqual(obj._xlated, {'attr': 'default'}) - self.assertFalse(option.called) - - def test_call_unset(self): - option = mock.Mock(return_value='converted', __default__=config._unset) - obj = mock.Mock(_raw={}, _xlated={}) - binding = config.Binding('attr', 'key', option) - - self.assertRaises(AttributeError, binding, obj) - self.assertEqual(obj._raw, {}) - self.assertEqual(obj._xlated, {}) - self.assertFalse(option.called) - - def test_getattr(self): - option = mock.Mock(opt_attr='spam') - binding = config.Binding('attr', 'key', option) - - self.assertEqual(binding.opt_attr, 'spam') - - def test_contains(self): - option = mock.MagicMock() - option.__contains__.return_value = True - binding = config.Binding('attr', 'key', option) - - self.assertTrue('spam' in binding) - - def test_getitem(self): - option = mock.MagicMock() - option.__getitem__.return_value = 'value' - binding = config.Binding('attr', 'key', option) - - self.assertEqual(binding['spam'], 'value') - - @mock.patch.object(config.Binding, '__call__', return_value='spam') - def test_get_cls(self, mock_call): - binding = config.Binding('attr', 'key', 'option') - - result = binding.__get__(None, 'class') - - self.assertEqual(result, binding) - self.assertFalse(mock_call.called) - - @mock.patch.object(config.Binding, '__call__', return_value='spam') - def test_get_obj(self, mock_call): - binding = config.Binding('attr', 'key', 'option') - - result = binding.__get__('obj', 'class') - - self.assertEqual(result, 'spam') - mock_call.assert_called_once_with('obj') - - def test_set(self): - binding = config.Binding('attr', 'key', 'option') - - self.assertRaises(AttributeError, binding.__set__, 'obj', 'value') - - def test_delete(self): - binding = config.Binding('attr', 'key', 'option') - - self.assertRaises(AttributeError, binding.__delete__, 'obj') - - -class COWDictTest(unittest.TestCase): - def test_init_base(self): - result = config.COWDict('orig') - 
- self.assertEqual(result._orig, 'orig') - self.assertEqual(result._new, {}) - self.assertEqual(result._lookaside, {}) - self.assertEqual(result._root, None) - self.assertEqual(result._children, []) - - def test_init_root(self): - root = mock.Mock(_children=[1, 2]) - - result = config.COWDict('orig', root) - - self.assertEqual(result._orig, 'orig') - self.assertEqual(result._new, {}) - self.assertEqual(result._lookaside, {}) - self.assertEqual(result._root, root) - self.assertEqual(result._children, []) - self.assertEqual(root._children, [1, 2, result]) - - def test_getitem_lookaside(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._lookaside['b'] = 'lookaside' - - self.assertEqual(cowd['b'], 'lookaside') - - def test_getitem_unset(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - - self.assertRaises(KeyError, lambda: cowd['d']) - - def test_getitem_deleted(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new['b'] = config._unset - - self.assertRaises(KeyError, lambda: cowd['b']) - - def test_getitem_dict(self): - orig = {'a': 1, 'b': 2, 'c': {'ca': 31, 'cb': 32, 'cc': 33}} - cowd = config.COWDict(orig) - - result = cowd['c'] - - self.assertTrue(isinstance(result, config.COWDict)) - self.assertEqual(id(result._orig), id(orig['c'])) - self.assertEqual(result._root, cowd) - self.assertEqual(cowd._lookaside, {'c': result}) - - def test_getitem_subdict(self): - root = mock.Mock(_children=[]) - orig = {'a': 1, 'b': 2, 'c': {'ca': 31, 'cb': 32, 'cc': 33}} - cowd = config.COWDict(orig, root) - - result = cowd['c'] - - self.assertTrue(isinstance(result, config.COWDict)) - self.assertEqual(id(result._orig), id(orig['c'])) - self.assertEqual(result._root, root) - self.assertEqual(cowd._lookaside, {'c': result}) - - def test_getitem_base(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - - self.assertEqual(cowd['b'], 2) - - def test_getitem_overridden(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new['b'] = 4 - - self.assertEqual(cowd['b'], 4) - - def test_setitem_base(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._lookaside['b'] = 'lookaside' - - cowd['b'] = 4 - - self.assertEqual(cowd._orig, {'a': 1, 'b': 2, 'c': 3}) - self.assertEqual(cowd._new, {'b': 4}) - self.assertEqual(cowd._lookaside, {}) - - def test_setitem_reset(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._lookaside['b'] = 'lookaside' - - cowd['b'] = 2 - - self.assertEqual(cowd._orig, {'a': 1, 'b': 2, 'c': 3}) - self.assertEqual(cowd._new, {}) - self.assertEqual(cowd._lookaside, {}) - - def test_delitem_base(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new['d'] = 4 - cowd._lookaside['d'] = 'lookaside' - - del cowd['d'] - - self.assertEqual(cowd._orig, {'a': 1, 'b': 2, 'c': 3}) - self.assertEqual(cowd._new, {}) - self.assertEqual(cowd._lookaside, {}) - - def test_delitem_override(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new['b'] = 4 - cowd._lookaside['b'] = 'lookaside' - - del cowd['b'] - - self.assertEqual(cowd._orig, {'a': 1, 'b': 2, 'c': 3}) - self.assertEqual(cowd._new, {'b': config._unset}) - self.assertEqual(cowd._lookaside, {}) - - def test_iter(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new = {'b': config._unset, 'd': 4} - - result = sorted(iter(cowd)) - - self.assertEqual(result, ['a', 'c', 'd']) - - def test_len(self): - 
orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new = {'b': config._unset, 'd': 4} - - self.assertEqual(len(cowd), 3) - - def test_keys(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new = {'b': config._unset, 'd': 4} - - result = cowd._keys() - - self.assertEqual(result, set(['a', 'b', 'c', 'd'])) - - def test_apply_internal(self): - orig = {'a': 1, 'b': 2, 'c': 3} - cowd = config.COWDict(orig) - cowd._new = {'b': config._unset, 'd': 4} - - cowd._apply() - - self.assertEqual(orig, {'a': 1, 'c': 3, 'd': 4}) - - @mock.patch.object(config.COWDict, '_apply') - def test_apply(self, mock_apply): - children = [mock.Mock(), mock.Mock(), mock.Mock()] - cowd = config.COWDict({}) - cowd._children = children[:] - cowd._new = {'a': 3, 'b': 2, 'c': 1} - cowd._lookaside = {'a': 1, 'b': 2, 'c': 3} - - cowd.apply() - - mock_apply.assert_called_once_with() - for child in children: - child._apply.assert_called_once_with() - self.assertEqual(len(children), 3) - self.assertEqual(cowd._new, {}) - self.assertEqual(cowd._lookaside, {}) - self.assertEqual(cowd._children, []) - - -class LoadTest(unittest.TestCase): - @mock.patch('functools.partial', return_value='partial') - def test_get_class(self, mock_partial): - load = config.Load() - - result = load.__get__(None, 'cls') - - self.assertEqual(result, 'partial') - mock_partial.assert_called_once_with(load.class_load, 'cls') - - @mock.patch('functools.partial', return_value='partial') - def test_get_inst(self, mock_partial): - load = config.Load() - - result = load.__get__('inst', 'cls') - - self.assertEqual(result, 'partial') - mock_partial.assert_called_once_with(load.inst_load, 'inst') - - DIR = 'dir' - FILE = 'file' - - fs_mock = collections.namedtuple( - 'fs_mock', ['mock_isfile', 'mock_isdir', 'mock_listdir', - 'mock_glob', 'mock_join']) - - @contextlib.contextmanager - def mock_fs(self, files): - def fake_isfile(name): - return files.get(name) == self.FILE - patch_isfile = mock.patch('os.path.isfile', side_effect=fake_isfile) - - def fake_isdir(name): - return files.get(name) == self.DIR - patch_isdir = mock.patch('os.path.isdir', side_effect=fake_isdir) - - def fake_listdir(dirname): - dirname = dirname.rstrip('/') + '/' - return list(set(key[len(dirname):].lstrip('/').split('/')[0] - for key in files.keys() - if key.startswith(dirname))) - patch_listdir = mock.patch('os.listdir', side_effect=fake_listdir) - - def fake_glob(pattern): - # Note this isn't fully accurate; /foo/* will match - # /foo/bar/baz - return fnmatch.filter(files.keys(), pattern) - patch_glob = mock.patch('glob.glob', side_effect=fake_glob) - - patch_join = mock.patch('os.path.join', side_effect=tests.fake_join) - - # Start the mocks and build the tuple we're yielding - fs_mocks = self.fs_mock( - patch_isfile.start(), - patch_isdir.start(), - patch_listdir.start(), - patch_glob.start(), - patch_join.start(), - ) - - try: - yield fs_mocks - finally: - patch_join.stop() - patch_glob.stop() - patch_listdir.stop() - patch_isdir.stop() - patch_isfile.stop() - - def test_iter_files_file(self): - with self.mock_fs({ - 'one': self.FILE, - }) as fs_mocks: - result = list(config.Load._iter_files('one')) - - self.assertEqual(result, ['one']) - fs_mocks.mock_isfile.assert_called_once_with('one') - self.assertFalse(fs_mocks.mock_isdir.called) - self.assertFalse(fs_mocks.mock_listdir.called) - self.assertFalse(fs_mocks.mock_glob.called) - - def test_iter_files_dir(self): - with self.mock_fs({ - 'dir': self.DIR, - 'dir/one': self.FILE, - 
'dir/two': self.FILE, - 'dir/three': self.DIR, - 'dir/four': self.FILE, - }) as fs_mocks: - result = list(config.Load._iter_files('dir')) - - self.assertEqual(result, ['dir/four', 'dir/one', 'dir/two']) - fs_mocks.mock_isfile.assert_has_calls([ - mock.call('dir'), - mock.call('dir/four'), - mock.call('dir/one'), - mock.call('dir/three'), - mock.call('dir/two'), - ]) - self.assertEqual(fs_mocks.mock_isfile.call_count, 5) - fs_mocks.mock_isdir.assert_called_once_with('dir') - fs_mocks.mock_listdir.assert_called_once_with('dir') - self.assertFalse(fs_mocks.mock_glob.called) - - def test_iter_files_glob(self): - with self.mock_fs({ - 'bad_one': self.FILE, - 'bad_two': self.FILE, - 'bad_three': self.DIR, - 'bad_four': self.FILE, - 'good_one': self.FILE, - 'good_two': self.FILE, - 'good_three': self.DIR, - 'good_four': self.FILE, - }) as fs_mocks: - result = list(config.Load._iter_files('good_*')) - - self.assertEqual(result, ['good_four', 'good_one', 'good_two']) - fs_mocks.mock_isfile.assert_has_calls([ - mock.call('good_*'), - mock.call('good_four'), - mock.call('good_one'), - mock.call('good_three'), - mock.call('good_two'), - ]) - self.assertEqual(fs_mocks.mock_isfile.call_count, 5) - fs_mocks.mock_isdir.assert_called_once_with('good_*') - self.assertFalse(fs_mocks.mock_listdir.called) - fs_mocks.mock_glob.assert_called_once_with('good_*') - - def test_iter_files_list(self): - with self.mock_fs({ - 'one': self.FILE, - 'two': self.FILE, - 'four': self.FILE, - 'five': self.FILE, - }) as fs_mocks: - result = list(config.Load._iter_files( - ['one', 'two', 'three', 'four', 'five'])) - - self.assertEqual(result, ['one', 'two', 'four', 'five']) - fs_mocks.mock_isfile.assert_has_calls([ - mock.call('one'), - mock.call('two'), - mock.call('three'), - mock.call('four'), - mock.call('five'), - ]) - self.assertEqual(fs_mocks.mock_isfile.call_count, 5) - fs_mocks.mock_isdir.assert_called_once_with('three') - self.assertFalse(fs_mocks.mock_listdir.called) - fs_mocks.mock_glob.assert_called_once_with('three') - - def test_merge_dict_unchanged(self): - lhs = { - 'a': 1, - 'b': 2, - 'c': 3, - } - rhs = {} - - config.Load._merge_dict(lhs, rhs) - - self.assertEqual(lhs, { - 'a': 1, - 'b': 2, - 'c': 3, - }) - self.assertEqual(rhs, {}) - - def test_merge_dict_flat(self): - lhs = { - 'a': 1, - 'b': 2, - 'c': 3, - } - rhs = { - 'b': 12, - 'd': 14, - } - - config.Load._merge_dict(lhs, rhs) - - self.assertEqual(lhs, { - 'a': 1, - 'b': 12, - 'c': 3, - 'd': 14, - }) - self.assertEqual(rhs, { - 'b': 12, - 'd': 14, - }) - - def test_merge_dict_nested(self): - lhs = { - 'a': 1, - 'b': 2, - 'c': { - 'ca': 31, - 'cb': 32, - 'cc': 33, - }, - } - rhs = { - 'b': 12, - 'c': { - 'cb': 132, - 'cd': 134, - }, - 'd': 14, - } - - config.Load._merge_dict(lhs, rhs) - - self.assertEqual(lhs, { - 'a': 1, - 'b': 12, - 'c': { - 'ca': 31, - 'cb': 132, - 'cc': 33, - 'cd': 134, - }, - 'd': 14, - }) - self.assertEqual(rhs, { - 'b': 12, - 'c': { - 'cb': 132, - 'cd': 134, - }, - 'd': 14, - }) - - def test_merge_dict_loop(self): - loop_lhs = { - 'loop_a': 91, - 'loop_b': 92, - 'loop_c': 93, - } - loop_rhs = { - 'loop_b': 192, - 'loop_d': 194, - } - lhs = { - 'a': 1, - 'b': loop_lhs, - 'c': loop_lhs, - } - rhs = { - 'b': loop_rhs, - 'c': loop_rhs, - 'd': 4, - } - - config.Load._merge_dict(lhs, rhs) - - self.assertEqual(lhs, { - 'a': 1, - 'b': { - 'loop_a': 91, - 'loop_b': 192, - 'loop_c': 93, - 'loop_d': 194, - }, - 'c': { - 'loop_a': 91, - 'loop_b': 192, - 'loop_c': 93, - 'loop_d': 194, - }, - 'd': 4, - }) - self.assertEqual(rhs, { - 'b': { - 
'loop_b': 192, - 'loop_d': 194, - }, - 'c': { - 'loop_b': 192, - 'loop_d': 194, - }, - 'd': 4, - }) - - def test_merge_dict_nondict_lhs(self): - lhs = { - 'a': 1, - 'b': 2, - 'c': 3, - } - rhs = { - 'b': 12, - 'c': { - 'cb': 132, - 'cd': 134, - }, - 'd': 14, - } - - self.assertRaises(config.ConfigException, config.Load._merge_dict, - lhs, rhs) - self.assertEqual(rhs, { - 'b': 12, - 'c': { - 'cb': 132, - 'cd': 134, - }, - 'd': 14, - }) - - def test_merge_dict_nondict_rhs(self): - lhs = { - 'a': 1, - 'b': 2, - 'c': { - 'cb': 32, - 'cd': 34, - }, - } - rhs = { - 'b': 12, - 'c': 13, - 'd': 14, - } - - self.assertRaises(config.ConfigException, config.Load._merge_dict, - lhs, rhs) - self.assertEqual(rhs, { - 'b': 12, - 'c': 13, - 'd': 14, - }) - - @mock.patch.object(builtins, 'open') - @mock.patch('yaml.safe_load', side_effect=lambda s: s.data) - @mock.patch.object(config.Load, '_iter_files', - return_value=['file1', 'file2', 'file3', 'file4']) - @mock.patch.object(config.Load, '_merge_dict') - def test_load_basic(self, mock_merge_dict, mock_iter_files, - mock_safe_load, mock_open): - files = { - 'file1': mock.MagicMock(data='file1_data'), - 'file2': mock.MagicMock(data='file2_data'), - 'file3': mock.MagicMock(data='file3_data'), - 'file4': mock.MagicMock(data='file4_data'), - } - for fobj in files.values(): - fobj.__enter__.return_value = fobj - mock_open.side_effect = lambda fname: files[fname] - load = config.Load() - - result = load._load('files') - - self.assertEqual(result, {}) - mock_iter_files.assert_called_once_with('files') - mock_open.assert_has_calls([ - mock.call('file1'), - mock.call('file2'), - mock.call('file3'), - mock.call('file4'), - ]) - self.assertEqual(mock_open.call_count, 4) - mock_safe_load.assert_has_calls([ - mock.call(files['file1']), - mock.call(files['file2']), - mock.call(files['file3']), - mock.call(files['file4']), - ]) - self.assertEqual(mock_safe_load.call_count, 4) - mock_merge_dict.assert_has_calls([ - mock.call({}, 'file1_data'), - mock.call({}, 'file2_data'), - mock.call({}, 'file3_data'), - mock.call({}, 'file4_data'), - ]) - self.assertEqual(mock_merge_dict.call_count, 4) - - @mock.patch.object(builtins, 'open') - @mock.patch('yaml.safe_load', side_effect=lambda s: s.data) - @mock.patch.object(config.Load, '_iter_files', - return_value=['file1', 'file2', 'file3', 'file4']) - @mock.patch.object(config.Load, '_merge_dict') - def test_load_startwith(self, mock_merge_dict, mock_iter_files, - mock_safe_load, mock_open): - files = { - 'file1': mock.MagicMock(data='file1_data'), - 'file2': mock.MagicMock(data='file2_data'), - 'file3': mock.MagicMock(data='file3_data'), - 'file4': mock.MagicMock(data='file4_data'), - } - for fobj in files.values(): - fobj.__enter__.return_value = fobj - mock_open.side_effect = lambda fname: files[fname] - load = config.Load() - - result = load._load('files', 'startwith') - - self.assertEqual(result, 'startwith') - mock_iter_files.assert_called_once_with('files') - mock_open.assert_has_calls([ - mock.call('file1'), - mock.call('file2'), - mock.call('file3'), - mock.call('file4'), - ]) - self.assertEqual(mock_open.call_count, 4) - mock_safe_load.assert_has_calls([ - mock.call(files['file1']), - mock.call(files['file2']), - mock.call(files['file3']), - mock.call(files['file4']), - ]) - self.assertEqual(mock_safe_load.call_count, 4) - mock_merge_dict.assert_has_calls([ - mock.call('startwith', 'file1_data'), - mock.call('startwith', 'file2_data'), - mock.call('startwith', 'file3_data'), - mock.call('startwith', 'file4_data'), - ]) - 
self.assertEqual(mock_merge_dict.call_count, 4) - - @mock.patch.object(config.Load, '_load', return_value='raw') - def test_class_load_validate(self, mock_load): - cls = mock.Mock(return_value='instance') - load = config.Load() - - result = load.class_load(cls, 'files') - - self.assertEqual(result, 'instance') - mock_load.assert_called_once_with('files') - cls.validate.assert_called_once_with('raw') - cls.assert_called_once_with('raw') - - @mock.patch.object(config.Load, '_load', return_value='raw') - def test_class_load_novalidate(self, mock_load): - cls = mock.Mock(return_value='instance') - load = config.Load() - - result = load.class_load(cls, 'files', False) - - self.assertEqual(result, 'instance') - mock_load.assert_called_once_with('files') - self.assertFalse(cls.validate.called) - cls.assert_called_once_with('raw') - - @mock.patch.object(config, 'COWDict', return_value='cow') - @mock.patch.object(config.Load, '_load') - def test_inst_load_validate(self, mock_load, mock_COWDict): - inst = mock.Mock(_raw='raw') - load = config.Load() - - result = load.inst_load(inst, 'files') - - self.assertEqual(result, inst) - mock_COWDict.assert_called_once_with('raw') - mock_load.assert_called_once_with('files', 'cow') - cow = mock_load.return_value - inst.validate.assert_called_once_with(cow) - cow.apply.assert_called_once_with() - inst._xlated.clear.assert_called_once_with() - - @mock.patch.object(config, 'COWDict', return_value='cow') - @mock.patch.object(config.Load, '_load') - def test_inst_load_novalidate(self, mock_load, mock_COWDict): - inst = mock.Mock(_raw='raw') - load = config.Load() - - result = load.inst_load(inst, 'files', False) - - self.assertEqual(result, inst) - mock_COWDict.assert_called_once_with('raw') - mock_load.assert_called_once_with('files', 'cow') - cow = mock_load.return_value - self.assertFalse(inst.validate.called) - cow.apply.assert_called_once_with() - inst._xlated.clear.assert_called_once_with() - - -class BaseConfigTest(unittest.TestCase): - def test_init(self): - result = config.BaseConfig('value') - - self.assertEqual(result._raw, 'value') - self.assertEqual(result._xlated, {}) - - def test_lookup_noname(self): - class TestConfig(config.BaseConfig): - _attrs = {} - - self.assertRaises(KeyError, TestConfig.lookup, '') - - def test_lookup_simplename(self): - class TestConfig(config.BaseConfig): - _attrs = {'spam': 'value'} - - result = TestConfig.lookup('spam') - - self.assertEqual(result, 'value') - - def test_lookup_shortlist(self): - class TestConfig(config.BaseConfig): - _attrs = {'spam': 'value'} - - result = TestConfig.lookup(['spam']) - - self.assertEqual(result, 'value') - - def test_lookup_descend(self): - class TestConfig(config.BaseConfig): - _attrs = { - 'spam': mock.Mock(_attrs={ - 'a': mock.Mock(_attrs={ - 'b': mock.Mock(_attrs={ - 'c': 'value', - }), - }), - }), - } - - result = TestConfig.lookup('//spam/a//b/c//') - - self.assertEqual(result, 'value') - - def test_lookup_descend_list(self): - class TestConfig(config.BaseConfig): - _attrs = { - 'spam': mock.Mock(_attrs={ - 'a': mock.Mock(_attrs={ - 'b': mock.Mock(_attrs={ - 'c': 'value', - }), - }), - }), - } - - result = TestConfig.lookup(['', 'spam', 'a', '', 'b', 'c', '']) - - self.assertEqual(result, 'value') - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_noattr(self, mock_lookup, mock_extend): - class TestConfig(config.BaseConfig): - _attrs = {} - - self.assertRaises(config.ConfigException, TestConfig.extend, - '', 
'option') - self.assertFalse(mock_lookup.called) - self.assertFalse(mock_extend.called) - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_simpleattr(self, mock_lookup, mock_extend): - opt = mock.Mock() - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - - TestConfig.extend('foo', 'option') - - self.assertFalse(mock_lookup.called) - mock_extend.assert_called_once_with('foo', 'foo', 'option') - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_shortlist(self, mock_lookup, mock_extend): - opt = mock.Mock() - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - - TestConfig.extend(['foo'], 'option') - - self.assertFalse(mock_lookup.called) - mock_extend.assert_called_once_with('foo', 'foo', 'option') - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_descend(self, mock_lookup, mock_extend): - opt = mock.Mock() - - class TestConfig(config.BaseConfig): - _attrs = {} - - TestConfig.extend('//spam/a//b/c//', 'option') - - mock_lookup.assert_called_once_with(['spam', 'a', 'b']) - mock_lookup.return_value._extend.assert_called_once_with( - 'c', 'c', 'option') - self.assertFalse(mock_extend.called) - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_descend_list(self, mock_lookup, mock_extend): - opt = mock.Mock() - - class TestConfig(config.BaseConfig): - _attrs = {} - - TestConfig.extend(['', 'spam', 'a', '', 'b', 'c', ''], 'option') - - mock_lookup.assert_called_once_with(['spam', 'a', 'b']) - mock_lookup.return_value._extend.assert_called_once_with( - 'c', 'c', 'option') - self.assertFalse(mock_extend.called) - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_altkey(self, mock_lookup, mock_extend): - opt = mock.Mock() - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - - TestConfig.extend('foo', 'option', 'key') - - self.assertFalse(mock_lookup.called) - mock_extend.assert_called_once_with('foo', 'key', 'option') - - @mock.patch.object(config.BaseConfig, '_extend') - @mock.patch.object(config.BaseConfig, 'lookup') - def test_extend_reservedattr(self, mock_lookup, mock_extend): - opt = mock.Mock() - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - - for attr in config.RESERVED: - self.assertRaises(config.ConfigException, TestConfig.extend, - attr, 'option') - - self.assertFalse(mock_lookup.called) - self.assertFalse(mock_extend.called) - - @mock.patch.object(config, '_schema_invalidate') - def test_extend_dupattr(self, mock_schema_invalidate): - opt = mock.Mock(spec=config.Option, _parents=set(), __key__='key') - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - _keys = {'key': opt} - - self.assertRaises(config.ConfigException, TestConfig._extend, - 'spam', 'bar', 'option') - self.assertFalse(mock_schema_invalidate.called) - - @mock.patch.object(config, '_schema_invalidate') - def test_extend_dupkey(self, mock_schema_invalidate): - opt = mock.Mock(spec=config.Option, _parents=set(), __key__='key') - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - _keys = {'key': opt} - - self.assertRaises(config.ConfigException, TestConfig._extend, - 'foo', 'key', 'option') - self.assertFalse(mock_schema_invalidate.called) - - @mock.patch.object(config, '_schema_invalidate') - 
def test_extend(self, mock_schema_invalidate): - opt = mock.Mock(spec=config.Option, _parents=set(), __key__='key') - - class TestConfig(config.BaseConfig): - _attrs = {'spam': opt} - _keys = {'key': opt} - - TestConfig._extend('foo', 'bar', 'option') - - foo = TestConfig.foo - self.assertTrue(isinstance(foo, config.Binding)) - self.assertEqual(foo.__attr__, 'foo') - self.assertEqual(foo.__key__, 'bar') - self.assertEqual(foo.__option__, 'option') - self.assertEqual(TestConfig._attrs, { - 'spam': opt, - 'foo': foo, - }) - self.assertEqual(TestConfig._keys, { - 'key': opt, - 'bar': foo, - }) - mock_schema_invalidate.assert_called_once_with(TestConfig) - - @mock.patch('jsonschema.validate') - def test_validate(self, mock_validate): - class TestConfig(config.BaseConfig): - __schema__ = 'schema' - - TestConfig.validate('value') - - mock_validate.assert_called_once_with('value', 'schema') - - -class ConfigMetaTest(unittest.TestCase): - def test_new_base(self): - self.assertEqual(config.Config._attrs, {}) - self.assertEqual(config.Config._keys, {}) - self.assertEqual(config.Config._schema_raw, {'type': 'object'}) - self.assertEqual(config.Config._schema_cache, None) - self.assertEqual(config.Config._parents, set()) - - def test_new_schema_capture(self): - class TestConfig(config.Config): - __schema__ = {'a': 1} - - self.assertFalse('__schema__' in TestConfig.__dict__) - self.assertEqual(TestConfig._attrs, {}) - self.assertEqual(TestConfig._keys, {}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object', 'a': 1}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - - def test_new_passthrough_internal(self): - opt = mock.Mock(spec=config.Option) - - class TestConfig(config.Config): - _spam = opt - - self.assertTrue('_spam' in TestConfig.__dict__) - self.assertEqual(TestConfig._attrs, {}) - self.assertEqual(TestConfig._keys, {}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object'}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - self.assertEqual(TestConfig._spam, opt) - - def test_new_passthrough_other(self): - class TestConfig(config.Config): - spam = 'value' - - self.assertTrue('spam' in TestConfig.__dict__) - self.assertEqual(TestConfig._attrs, {}) - self.assertEqual(TestConfig._keys, {}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object'}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - self.assertEqual(TestConfig.spam, 'value') - - def test_new_option_nokey(self): - opt = mock.Mock(spec=config.Option, _parents=set()) - - class TestConfig(config.Config): - spam = opt - - self.assertTrue('spam' in TestConfig.__dict__) - spam = TestConfig.__dict__['spam'] - self.assertTrue(isinstance(spam, config.Binding)) - self.assertEqual(spam.__attr__, 'spam') - self.assertEqual(spam.__key__, 'spam') - self.assertEqual(spam.__option__, opt) - self.assertEqual(spam.__option__._parents, set([TestConfig])) - self.assertEqual(TestConfig._attrs, {'spam': spam}) - self.assertEqual(TestConfig._keys, {'spam': spam}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object'}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - - def test_new_option_withkey(self): - opt = mock.Mock(spec=config.Option, _parents=set(), __key__='key') - - class TestConfig(config.Config): - spam = opt - - self.assertTrue('spam' in TestConfig.__dict__) - spam = TestConfig.__dict__['spam'] - 
self.assertTrue(isinstance(spam, config.Binding)) - self.assertEqual(spam.__attr__, 'spam') - self.assertEqual(spam.__key__, 'key') - self.assertEqual(spam.__option__, opt) - self.assertEqual(spam.__option__._parents, set([TestConfig])) - self.assertEqual(TestConfig._attrs, {'spam': spam}) - self.assertEqual(TestConfig._keys, {'key': spam}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object'}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - - def test_new_option_duplicatekey(self): - opt1 = mock.Mock(spec=config.Option, _parents=set(), __key__='key') - opt2 = mock.Mock(spec=config.Option, _parents=set(), __key__='key') - namespace = { - 'spam1': opt1, - 'spam2': opt2, - } - - self.assertRaises(config.ConfigException, config.ConfigMeta, - 'TestConfig', (config.Config,), namespace) - - def test_new_class_nokey(self): - class TestConfig(config.Config): - class spam(config.Config): - pass - - self.assertTrue('spam' in TestConfig.__dict__) - spam = TestConfig.__dict__['spam'] - self.assertTrue(isinstance(spam, config.Binding)) - self.assertEqual(spam.__attr__, 'spam') - self.assertEqual(spam.__key__, 'spam') - self.assertTrue(issubclass(spam.__option__, config.Config)) - self.assertEqual(spam.__option__._parents, set([TestConfig])) - self.assertEqual(TestConfig._attrs, {'spam': spam}) - self.assertEqual(TestConfig._keys, {'spam': spam}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object'}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - - def test_new_class_withkey(self): - class TestConfig(config.Config): - class spam(config.Config): - __key__ = 'key' - - self.assertTrue('spam' in TestConfig.__dict__) - spam = TestConfig.__dict__['spam'] - self.assertTrue(isinstance(spam, config.Binding)) - self.assertEqual(spam.__attr__, 'spam') - self.assertEqual(spam.__key__, 'key') - self.assertTrue(issubclass(spam.__option__, config.Config)) - self.assertEqual(spam.__option__._parents, set([TestConfig])) - self.assertEqual(TestConfig._attrs, {'spam': spam}) - self.assertEqual(TestConfig._keys, {'key': spam}) - self.assertEqual(TestConfig._schema_raw, {'type': 'object'}) - self.assertEqual(TestConfig._schema_cache, None) - self.assertEqual(TestConfig._parents, set()) - - def test_new_reservedattr(self): - for attr in config.RESERVED: - opt = mock.Mock(spec=config.Option, _parents=set()) - namespace = {attr: opt} - - self.assertRaises(config.ConfigException, config.ConfigMeta, - 'TestConfig', (config.Config,), namespace) - - -class OptionTest(unittest.TestCase): - def test_init_base(self): - result = config.Option() - - self.assertEqual(result.__default__, config._unset) - self.assertEqual(result.__doc__, '') - self.assertEqual(getattr(result, '__key__', 'unset'), 'unset') - self.assertEqual(result._schema_raw, {}) - self.assertEqual(result._parents, set()) - - def test_init_alt(self): - result = config.Option('default', 'help', {'type': 'int'}, - [1, 2, 3, 5, 8], 'key') - - self.assertEqual(result.__default__, 'default') - self.assertEqual(result.__doc__, 'help') - self.assertEqual(getattr(result, '__key__', 'unset'), 'key') - self.assertEqual(result._schema_raw, { - 'type': 'int', - 'default': 'default', - 'description': 'help', - 'enum': [1, 2, 3, 5, 8], - }) - self.assertEqual(result._parents, set()) - - def test_call(self): - opt = config.Option() - - result = opt('spam') - - self.assertEqual(result, 'spam') - - def test_extend(self): - opt = config.Option() - - 
self.assertRaises(config.ConfigException, opt._extend, - 'attr', 'key', 'option') - - @mock.patch.object(config.Option, '__schema__', 'schema') - @mock.patch('jsonschema.validate') - def test_validate(self, mock_validate): - opt = config.Option() - - opt.validate('value') - - mock_validate.assert_called_once_with('value', 'schema') - - def test_schema(self): - opt = config.Option() - opt._schema_raw = 'schema' - - self.assertEqual(opt.__schema__, 'schema') - - -class ListOptionTest(unittest.TestCase): - def test_init_base(self): - result = config.ListOption() - - self.assertEqual(result.__default__, config._unset) - self.assertEqual(result.__doc__, '') - self.assertEqual(getattr(result, '__key__', 'unset'), 'unset') - self.assertEqual(result._schema_raw, {'type': 'array'}) - self.assertEqual(result._parents, set()) - self.assertEqual(result._mode, 'noxlate') - self.assertEqual(result._items, None) - self.assertEqual(result._attrs, {}) - self.assertEqual(result._schema_cache, None) - - def test_init_alt(self): - result = config.ListOption('default', 'help', {'extra': 'data'}, - None, 'key') - - self.assertEqual(result.__default__, 'default') - self.assertEqual(result.__doc__, 'help') - self.assertEqual(getattr(result, '__key__', 'unset'), 'key') - self.assertEqual(result._schema_raw, { - 'type': 'array', - 'default': 'default', - 'description': 'help', - 'extra': 'data', - }) - self.assertEqual(result._parents, set()) - self.assertEqual(result._mode, 'noxlate') - self.assertEqual(result._items, None) - self.assertEqual(result._attrs, {}) - self.assertEqual(result._schema_cache, None) - - def test_init_list(self): - items = mock.Mock(_parents=set()) - result = config.ListOption(items=items) - - self.assertEqual(result._mode, 'list') - self.assertEqual(result._items, items) - self.assertEqual(result._attrs, {'[]': items}) - self.assertEqual(items._parents, set([result])) - - def test_init_tuple(self): - items = [mock.Mock(_parents=set()) for i in range(5)] - items[2] = None - result = config.ListOption(items=tuple(items)) - - self.assertEqual(result._mode, 'tuple') - self.assertEqual(result._items, items) - self.assertEqual(result._attrs, { - '[0]': items[0], - '[1]': items[1], - '[3]': items[3], - '[4]': items[4], - }) - for item in items: - if item: - self.assertEqual(item._parents, set([result])) - - def test_call_noxlate(self): - opt = config.ListOption() - - result = opt([1, 2, 3]) - - self.assertEqual(result, [1, 2, 3]) - - def test_call_list(self): - items = mock.Mock(_parents=set(), side_effect=lambda x: str(x)) - opt = config.ListOption(items=items) - - result = opt([1, 2, 3]) - - self.assertEqual(result, ['1', '2', '3']) - items.assert_has_calls([ - mock.call(1), - mock.call(2), - mock.call(3), - ]) - self.assertEqual(items.call_count, 3) - - def test_call_tuple(self): - items = [ - mock.Mock(_parents=set(), side_effect=lambda x: (x, 1)), - mock.Mock(_parents=set(), side_effect=lambda x: (x, 2)), - mock.Mock(_parents=set(), side_effect=lambda x: (x, 3)), - ] - opt = config.ListOption(items=items) - - result = opt([1, 2, 3, 4, 5]) - - self.assertEqual(result, [(1, 1), (2, 2), (3, 3), 4, 5]) - for idx, item in enumerate(items): - item.assert_called_once_with(idx + 1) - - def test_schema_cached(self): - opt = config.ListOption() - opt._schema_cache = 'cached' - - self.assertEqual(opt.__schema__, 'cached') - self.assertEqual(opt._schema_cache, 'cached') - - def test_schema_noxlate(self): - opt = config.ListOption() - - expected = { - 'type': 'array', - } - 
self.assertEqual(opt.__schema__, expected) - self.assertEqual(opt._schema_cache, expected) - - def test_schema_list(self): - items = mock.Mock(_parents=set(), __schema__='schema') - opt = config.ListOption(items=items) - - expected = { - 'type': 'array', - 'items': 'schema', - } - self.assertEqual(opt.__schema__, expected) - self.assertEqual(opt._schema_cache, expected) - - def test_schema_tuple(self): - items = [mock.Mock(_parents=set(), __schema__='schema%d' % i) - for i in range(5)] - items[2] = None - opt = config.ListOption(items=items) - - expected = { - 'type': 'array', - 'items': ['schema0', 'schema1', {}, 'schema3', 'schema4'], - } - self.assertEqual(opt.__schema__, expected) - self.assertEqual(opt._schema_cache, expected) diff --git a/tests/unit/common/test_utils.py b/tests/unit/common/test_utils.py deleted file mode 100644 index 64040ba..0000000 --- a/tests/unit/common/test_utils.py +++ /dev/null @@ -1,123 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. - -import unittest - -import mock - -from striker.common import utils - -import tests - - -class CanonicalizePathTest(unittest.TestCase): - @mock.patch('os.path.isabs', tests.fake_isabs) - @mock.patch('os.path.join', tests.fake_join) - @mock.patch('os.path.abspath', tests.fake_abspath) - def test_absolute(self): - result = utils.canonicalize_path('/foo/bar', '/bar/baz') - - self.assertEqual(result, '/bar/baz') - - @mock.patch('os.path.isabs', tests.fake_isabs) - @mock.patch('os.path.join', tests.fake_join) - @mock.patch('os.path.abspath', tests.fake_abspath) - def test_relative(self): - result = utils.canonicalize_path('/foo/bar', 'bar/baz') - - self.assertEqual(result, '/foo/bar/bar/baz') - - @mock.patch('os.path.isabs', tests.fake_isabs) - @mock.patch('os.path.join', tests.fake_join) - @mock.patch('os.path.abspath', tests.fake_abspath) - def test_relative_with_cwd(self): - result = utils.canonicalize_path('/foo/bar', './baz') - - self.assertEqual(result, '/foo/bar/baz') - - @mock.patch('os.path.isabs', tests.fake_isabs) - @mock.patch('os.path.join', tests.fake_join) - @mock.patch('os.path.abspath', tests.fake_abspath) - def test_relative_with_parent(self): - result = utils.canonicalize_path('/foo/bar', '../baz') - - self.assertEqual(result, '/foo/baz') - - -class BackoffTest(unittest.TestCase): - @mock.patch('time.sleep') - def test_backoff(self, mock_sleep): - max_tries = 5 - - for i, trial in enumerate(utils.backoff(max_tries)): - self.assertEqual(i, trial) - - if i: - mock_sleep.assert_called_once_with(1 << (i - 1)) - else: - self.assertFalse(mock_sleep.called) - - mock_sleep.reset_mock() - - self.assertEqual(i, max_tries - 1) - - -class BooleanTest(unittest.TestCase): - truth_table = [ - ('TrUe', True), - ('t', True), - ('T', True), - ('yEs', True), - ('y', True), - ('Y', True), - ('oN', True), - ('1', True), - ('120', True), - ('FaLsE', False), - ('f', False), - ('F', False), - ('nO', False), - ('n', False), - ('N', False), - ('oFf', False), - ('0', False), - ('000', 
False), - ('other', None), - (True, True), - (False, False), - (1, True), - (0, False), - ] - - def test_with_raise(self): - for value, expected in self.truth_table: - if expected is None: - self.assertRaises(ValueError, utils.boolean, value) - else: - self.assertEqual(expected, utils.boolean(value)) - - def test_default_false(self): - for value, expected in self.truth_table: - if expected is None: - expected = False - - self.assertEqual(expected, utils.boolean(value, False)) - - def test_default_true(self): - for value, expected in self.truth_table: - if expected is None: - expected = True - - self.assertEqual(expected, utils.boolean(value, True)) diff --git a/tests/unit/core/__init__.py b/tests/unit/core/__init__.py deleted file mode 100644 index 6a3ca48..0000000 --- a/tests/unit/core/__init__.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. diff --git a/tests/unit/core/test_context.py b/tests/unit/core/test_context.py deleted file mode 100644 index 283f08a..0000000 --- a/tests/unit/core/test_context.py +++ /dev/null @@ -1,79 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. 
- -import unittest - -import mock - -from striker.core import context -from striker.core import environment - - -class ContextTest(unittest.TestCase): - def test_init_base(self): - ctxt = context.Context('/path/to/workspace', 'config', 'logger') - - self.assertEqual(ctxt.workspace, '/path/to/workspace') - self.assertEqual(ctxt.config, 'config') - self.assertEqual(ctxt.logger, 'logger') - self.assertEqual(ctxt.debug, False) - self.assertEqual(ctxt.dry_run, False) - self.assertEqual(ctxt._extras, {}) - self.assertEqual(ctxt._environ, None) - - def test_init_alt(self): - ctxt = context.Context('/path/to/workspace', 'config', 'logger', - debug=True, dry_run=True, accounts='accounts', - other='other') - - self.assertEqual(ctxt.workspace, '/path/to/workspace') - self.assertEqual(ctxt.config, 'config') - self.assertEqual(ctxt.logger, 'logger') - self.assertEqual(ctxt.debug, True) - self.assertEqual(ctxt.dry_run, True) - self.assertEqual(ctxt._extras, { - 'accounts': 'accounts', - 'other': 'other', - }) - self.assertEqual(ctxt._environ, None) - - def test_getattr_exists(self): - ctxt = context.Context('/path/to/workspace', 'config', 'logger', - attr='value') - - self.assertEqual(ctxt.attr, 'value') - - def test_getattr_noexist(self): - ctxt = context.Context('/path/to/workspace', 'config', 'logger', - attr='value') - - self.assertRaises(AttributeError, lambda: ctxt.other) - - @mock.patch.object(environment, 'Environment', return_value='environ') - def test_environ_cached(self, mock_Environment): - ctxt = context.Context('/path/to/workspace', 'config', 'logger') - ctxt._environ = 'cached' - - self.assertEqual(ctxt.environ, 'cached') - self.assertEqual(ctxt._environ, 'cached') - self.assertFalse(mock_Environment.called) - - @mock.patch.object(environment, 'Environment', return_value='environ') - def test_environ_uncached(self, mock_Environment): - ctxt = context.Context('/path/to/workspace', 'config', 'logger') - - self.assertEqual(ctxt.environ, 'environ') - self.assertEqual(ctxt._environ, 'environ') - mock_Environment.assert_called_once_with('logger') diff --git a/tests/unit/core/test_environment.py b/tests/unit/core/test_environment.py deleted file mode 100644 index 457594d..0000000 --- a/tests/unit/core/test_environment.py +++ /dev/null @@ -1,667 +0,0 @@ -# Copyright 2014 Rackspace -# All Rights Reserved. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the -# License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an "AS -# IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either -# express or implied. See the License for the specific language -# governing permissions and limitations under the License. 
- -import os -import subprocess -import unittest - -import mock - -from striker.common import utils -from striker.core import environment - -import tests - - -class ExecResultTest(unittest.TestCase): - def test_init_success(self): - cmd = ['arg1', 'arg2 space', 'arg3"double', "arg4'single", 'arg5'] - cmd_text = 'arg1 "arg2 space" "arg3\\"double" "arg4\'single" arg5' - result = environment.ExecResult(cmd, None, None, 0) - - self.assertEqual(result.cmd, cmd) - self.assertEqual(result.cmd_text, cmd_text) - self.assertEqual(result.stdout, None) - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 0) - self.assertEqual(str(result), "'%s' succeeded" % cmd_text) - - def test_init_stdout(self): - cmd = ['arg1', 'arg2 space', 'arg3"double', "arg4'single", 'arg5'] - cmd_text = 'arg1 "arg2 space" "arg3\\"double" "arg4\'single" arg5' - result = environment.ExecResult(cmd, 'output', None, 0) - - self.assertEqual(result.cmd, cmd) - self.assertEqual(result.cmd_text, cmd_text) - self.assertEqual(result.stdout, 'output') - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 0) - self.assertEqual(str(result), "'%s' said: output" % cmd_text) - - def test_init_stderr(self): - cmd = ['arg1', 'arg2 space', 'arg3"double', "arg4'single", 'arg5'] - cmd_text = 'arg1 "arg2 space" "arg3\\"double" "arg4\'single" arg5' - result = environment.ExecResult(cmd, 'output', 'error', 0) - - self.assertEqual(result.cmd, cmd) - self.assertEqual(result.cmd_text, cmd_text) - self.assertEqual(result.stdout, 'output') - self.assertEqual(result.stderr, 'error') - self.assertEqual(result.return_code, 0) - self.assertEqual(str(result), "'%s' said: error" % cmd_text) - - def test_init_failure(self): - cmd = ['arg1', 'arg2 space', 'arg3"double', "arg4'single", 'arg5'] - cmd_text = 'arg1 "arg2 space" "arg3\\"double" "arg4\'single" arg5' - result = environment.ExecResult(cmd, 'output', 'error', 5) - - self.assertEqual(result.cmd, cmd) - self.assertEqual(result.cmd_text, cmd_text) - self.assertEqual(result.stdout, 'output') - self.assertEqual(result.stderr, 'error') - self.assertEqual(result.return_code, 5) - self.assertEqual(str(result), "'%s' failed with return code 5" % - cmd_text) - - def test_true(self): - result = environment.ExecResult(['cmd'], None, None, 0) - - self.assertTrue(result) - - def test_false(self): - result = environment.ExecResult(['cmd'], None, None, 1) - - self.assertFalse(result) - - -class EnvironmentTest(unittest.TestCase): - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - def test_init_base(self, mock_chdir, mock_getcwd): - env = environment.Environment('logger') - - self.assertEqual(env, {'TEST_VAR1': '1', 'TEST_VAR2': '2'}) - self.assertEqual(env.logger, 'logger') - self.assertEqual(env.cwd, '/some/path') - self.assertEqual(env.venv_home, None) - self.assertFalse(mock_chdir.called) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - def test_init_alt(self, mock_chdir, mock_getcwd): - environ = { - 'TEST_VAR3': '3', - 'TEST_VAR4': '4', - } - env = environment.Environment('logger', environ, '/other/path', - '/venv/home') - - self.assertEqual(env, environ) - self.assertEqual(env.logger, 'logger') - self.assertEqual(env.cwd, '/some/path') - self.assertEqual(env.venv_home, '/venv/home') 
- mock_chdir.assert_called_once_with('/other/path') - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os.path, 'join', tests.fake_join) - @mock.patch.object(os, 'pathsep', ':') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': (None, None), - })) - def test_call_basic(self, mock_Popen, mock_backoff, mock_canonicalize_path, - mock_chdir, mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - - result = env(['test', 'one', 'two']) - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, None) - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 0) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': (None, None), - })) - def test_call_string(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - - result = env("test one two") - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, None) - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 0) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True) - logger.assert_has_calls([ - mock.call.debug( - "Notice: splitting command string 'test one two'"), - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 2) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': (None, None), - })) - def test_call_cwd(self, mock_Popen, mock_backoff, mock_canonicalize_path, - mock_chdir, mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - - result = env(['test', 'one', 'two'], cwd='/other/path') - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, None) - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 0) - 
mock_canonicalize_path.assert_called_once_with( - '/some/path', '/other/path') - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/canon/path', close_fds=True) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /canon/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': ('output', 'error'), - })) - def test_call_capture(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - - result = env(['test', 'one', 'two'], capture_output=True) - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, 'output') - self.assertEqual(result.stderr, 'error') - self.assertEqual(result.return_code, 0) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 1, - 'communicate.return_value': (None, None), - })) - def test_call_failure_raise(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - - try: - result = env(['test', 'one', 'two']) - except environment.ExecResult as exc: - self.assertEqual(exc.cmd, ['test', 'one', 'two']) - self.assertEqual(exc.stdout, None) - self.assertEqual(exc.stderr, None) - self.assertEqual(exc.return_code, 1) - else: - self.fail("Expected ExecResult to be raised") - - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 1, - 'communicate.return_value': (None, None), - })) - def test_call_failure_noraise(self, mock_Popen, mock_backoff, - 
mock_canonicalize_path, mock_chdir, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - - result = env(['test', 'one', 'two'], do_raise=False) - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, None) - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 1) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': ('output', 'error'), - })) - def test_call_retry_success(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - retry = mock.Mock(return_value=True) - - result = env(['test', 'one', 'two'], retry=retry) - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, 'output') - self.assertEqual(result.stderr, 'error') - self.assertEqual(result.return_code, 0) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(5) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True, - stdout=subprocess.PIPE, stderr=subprocess.PIPE) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - self.assertFalse(retry.called) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': (None, None), - })) - def test_call_retry_success_badretries(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - retry = mock.Mock(return_value=True) - - result = env(['test', 'one', 'two'], retry=retry, max_tries=-1) - - self.assertEqual(result.cmd, ['test', 'one', 'two']) - self.assertEqual(result.stdout, None) - self.assertEqual(result.stderr, None) - self.assertEqual(result.return_code, 0) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(1) - mock_Popen.assert_called_once_with( - ['test', 'one', 'two'], env=env, cwd='/some/path', close_fds=True) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - ]) - self.assertEqual(len(logger.method_calls), 1) - self.assertFalse(retry.called) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', 
return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0, 1, 2, 3, 4, 5, 6]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': ('output', 'error'), - })) - def test_call_retry_withtries(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - retry = mock.Mock(return_value=True) - exec_results = [ - mock.Mock(__nonzero__=mock.Mock(return_value=False), - __bool__=mock.Mock(return_value=False)), - mock.Mock(__nonzero__=mock.Mock(return_value=False), - __bool__=mock.Mock(return_value=False)), - mock.Mock(__nonzero__=mock.Mock(return_value=True), - __bool__=mock.Mock(return_value=True)), - ] - - with mock.patch.object(environment, 'ExecResult', - side_effect=exec_results) as mock_ExecResult: - result = env(['test', 'one', 'two'], retry=retry, max_tries=7) - - self.assertEqual(result, exec_results[-1]) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(7) - mock_Popen.assert_has_calls([ - mock.call(['test', 'one', 'two'], env=env, cwd='/some/path', - close_fds=True, stdout=subprocess.PIPE, - stderr=subprocess.PIPE), - mock.call(['test', 'one', 'two'], env=env, cwd='/some/path', - close_fds=True, stdout=subprocess.PIPE, - stderr=subprocess.PIPE), - mock.call(['test', 'one', 'two'], env=env, cwd='/some/path', - close_fds=True, stdout=subprocess.PIPE, - stderr=subprocess.PIPE), - ]) - self.assertEqual(mock_Popen.call_count, 3) - mock_ExecResult.assert_has_calls([ - mock.call(['test', 'one', 'two'], 'output', 'error', 0), - mock.call(['test', 'one', 'two'], 'output', 'error', 0), - mock.call(['test', 'one', 'two'], 'output', 'error', 0), - ]) - self.assertEqual(mock_ExecResult.call_count, 3) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - mock.call.warn('Failure caught; retrying command (try #2)'), - mock.call.warn('Failure caught; retrying command (try #3)'), - ]) - self.assertEqual(len(logger.method_calls), 3) - retry.assert_has_calls([mock.call(res) for res in exec_results[:-1]]) - self.assertEqual(retry.call_count, len(exec_results) - 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - @mock.patch.object(utils, 'backoff', return_value=[0, 1]) - @mock.patch.object(subprocess, 'Popen', return_value=mock.Mock(**{ - 'returncode': 0, - 'communicate.return_value': ('output', 'error'), - })) - def test_call_retry_withtries_failure(self, mock_Popen, mock_backoff, - mock_canonicalize_path, mock_chdir, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - retry = mock.Mock(return_value=True) - exec_results = [ - mock.Mock(__nonzero__=mock.Mock(return_value=False), - __bool__=mock.Mock(return_value=False)), - mock.Mock(__nonzero__=mock.Mock(return_value=False), - __bool__=mock.Mock(return_value=False)), - mock.Mock(__nonzero__=mock.Mock(return_value=True), - __bool__=mock.Mock(return_value=True)), - ] - - with mock.patch.object(environment, 'ExecResult', - side_effect=exec_results) as mock_ExecResult: - result = env(['test', 'one', 'two'], 
retry=retry, max_tries=2, - do_raise=False) - - self.assertEqual(result, exec_results[-2]) - self.assertFalse(mock_canonicalize_path.called) - mock_backoff.assert_called_once_with(2) - mock_Popen.assert_has_calls([ - mock.call(['test', 'one', 'two'], env=env, cwd='/some/path', - close_fds=True, stdout=subprocess.PIPE, - stderr=subprocess.PIPE), - mock.call(['test', 'one', 'two'], env=env, cwd='/some/path', - close_fds=True, stdout=subprocess.PIPE, - stderr=subprocess.PIPE), - ]) - self.assertEqual(mock_Popen.call_count, 2) - mock_ExecResult.assert_has_calls([ - mock.call(['test', 'one', 'two'], 'output', 'error', 0), - mock.call(['test', 'one', 'two'], 'output', 'error', 0), - ]) - self.assertEqual(mock_ExecResult.call_count, 2) - logger.assert_has_calls([ - mock.call.debug( - "Executing command: ['test', 'one', 'two'] (cwd /some/path)"), - mock.call.warn('Failure caught; retrying command (try #2)'), - mock.call.warn('Unable to retry: too many attempts'), - ]) - self.assertEqual(len(logger.method_calls), 3) - retry.assert_has_calls([mock.call(res) for res in exec_results[:-2]]) - self.assertEqual(retry.call_count, len(exec_results) - 1) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - def test_chdir(self, mock_canonicalize_path, mock_getcwd): - with mock.patch.object(environment.Environment, 'chdir'): - env = environment.Environment('logger') - - result = env.chdir('test/directory') - - self.assertEqual(result, '/canon/path') - self.assertEqual(env.cwd, '/canon/path') - mock_canonicalize_path.assert_called_once_with( - '/some/path', 'test/directory') - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2', - PATH='/bin') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(os.path, 'exists', return_value=False) - @mock.patch('shutil.rmtree') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(environment.Environment, '__call__') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - def test_create_venv_basic(self, mock_canonicalize_path, mock_call, - mock_chdir, mock_rmtree, mock_exists, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - expected = dict(env) - expected.update({ - 'VIRTUAL_ENV': '/canon/path', - 'PATH': '/canon/path/bin:/bin', - }) - mock_chdir.reset_mock() - - new_env = env.create_venv('venv/dir') - - self.assertNotEqual(id(new_env), id(env)) - self.assertTrue(isinstance(new_env, environment.Environment)) - self.assertEqual(new_env, expected) - self.assertEqual(new_env.logger, logger) - self.assertEqual(new_env.cwd, '/some/path') - self.assertEqual(new_env.venv_home, '/canon/path') - mock_canonicalize_path.assert_called_once_with( - '/some/path', 'venv/dir') - mock_exists.assert_called_once_with('/canon/path') - self.assertFalse(mock_rmtree.called) - mock_call.assert_called_once_with(['virtualenv', '/canon/path']) - mock_chdir.assert_called_once_with('/canon/path') - logger.assert_has_calls([ - mock.call.debug('Preparing virtual environment /canon/path'), - mock.call.info('Creating virtual environment /canon/path'), - ]) - self.assertEqual(len(logger.method_calls), 2) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2', - PATH='/bin') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(os.path, 'exists', return_value=False) - 
@mock.patch('shutil.rmtree') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(environment.Environment, '__call__') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - def test_create_venv_update(self, mock_canonicalize_path, mock_call, - mock_chdir, mock_rmtree, mock_exists, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - expected = dict(env) - expected.update({ - 'VIRTUAL_ENV': 'bah', - 'PATH': '/canon/path/bin:/bin', - 'a': 'foo', - }) - mock_chdir.reset_mock() - - new_env = env.create_venv('venv/dir', VIRTUAL_ENV='bah', a='foo') - - self.assertNotEqual(id(new_env), id(env)) - self.assertTrue(isinstance(new_env, environment.Environment)) - self.assertEqual(new_env, expected) - self.assertEqual(new_env.logger, logger) - self.assertEqual(new_env.cwd, '/some/path') - self.assertEqual(new_env.venv_home, '/canon/path') - mock_canonicalize_path.assert_called_once_with( - '/some/path', 'venv/dir') - mock_exists.assert_called_once_with('/canon/path') - self.assertFalse(mock_rmtree.called) - mock_call.assert_called_once_with(['virtualenv', '/canon/path']) - mock_chdir.assert_called_once_with('/canon/path') - logger.assert_has_calls([ - mock.call.debug('Preparing virtual environment /canon/path'), - mock.call.info('Creating virtual environment /canon/path'), - ]) - self.assertEqual(len(logger.method_calls), 2) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2', - PATH='/bin') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(os.path, 'exists', return_value=True) - @mock.patch('shutil.rmtree') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(environment.Environment, '__call__') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - def test_create_venv_exists(self, mock_canonicalize_path, mock_call, - mock_chdir, mock_rmtree, mock_exists, - mock_getcwd): - logger = mock.Mock() - env = environment.Environment(logger) - expected = dict(env) - expected.update({ - 'VIRTUAL_ENV': '/canon/path', - 'PATH': '/canon/path/bin:/bin', - }) - mock_chdir.reset_mock() - - new_env = env.create_venv('venv/dir') - - self.assertNotEqual(id(new_env), id(env)) - self.assertTrue(isinstance(new_env, environment.Environment)) - self.assertEqual(new_env, expected) - self.assertEqual(new_env.logger, logger) - self.assertEqual(new_env.cwd, '/some/path') - self.assertEqual(new_env.venv_home, '/canon/path') - mock_canonicalize_path.assert_called_once_with( - '/some/path', 'venv/dir') - mock_exists.assert_called_once_with('/canon/path') - self.assertFalse(mock_rmtree.called) - self.assertFalse(mock_call.called) - mock_chdir.assert_called_once_with('/canon/path') - logger.assert_has_calls([ - mock.call.debug('Preparing virtual environment /canon/path'), - mock.call.info('Using existing virtual environment /canon/path'), - ]) - self.assertEqual(len(logger.method_calls), 2) - - @mock.patch.dict(os.environ, clear=True, TEST_VAR1='1', TEST_VAR2='2', - PATH='/bin') - @mock.patch.object(os, 'getcwd', return_value='/some/path') - @mock.patch.object(os.path, 'exists', return_value=True) - @mock.patch('shutil.rmtree') - @mock.patch.object(environment.Environment, 'chdir') - @mock.patch.object(environment.Environment, '__call__') - @mock.patch.object(utils, 'canonicalize_path', return_value='/canon/path') - def test_create_venv_rebuild(self, mock_canonicalize_path, mock_call, - mock_chdir, mock_rmtree, mock_exists, - mock_getcwd): - logger = 
mock.Mock() - env = environment.Environment(logger) - expected = dict(env) - expected.update({ - 'VIRTUAL_ENV': '/canon/path', - 'PATH': '/canon/path/bin:/bin', - }) - mock_chdir.reset_mock() - - new_env = env.create_venv('venv/dir', True) - - self.assertNotEqual(id(new_env), id(env)) - self.assertTrue(isinstance(new_env, environment.Environment)) - self.assertEqual(new_env, expected) - self.assertEqual(new_env.logger, logger) - self.assertEqual(new_env.cwd, '/some/path') - self.assertEqual(new_env.venv_home, '/canon/path') - mock_canonicalize_path.assert_called_once_with( - '/some/path', 'venv/dir') - mock_exists.assert_called_once_with('/canon/path') - mock_rmtree.assert_called_once_with('/canon/path') - mock_call.assert_called_once_with(['virtualenv', '/canon/path']) - mock_chdir.assert_called_once_with('/canon/path') - logger.assert_has_calls([ - mock.call.debug('Preparing virtual environment /canon/path'), - mock.call.info('Destroying old virtual environment /canon/path'), - mock.call.info('Creating virtual environment /canon/path'), - ]) - self.assertEqual(len(logger.method_calls), 3) diff --git a/tox.ini b/tox.ini deleted file mode 100644 index 7c25551..0000000 --- a/tox.ini +++ /dev/null @@ -1,27 +0,0 @@ -[tox] -envlist = py26,py27,py33,py34,pep8 - -[testenv] -setenv = LANG=en_US.UTF-8 - LANGUAGE=en_US:en - LC_ALL=C - -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt -commands = nosetests -v {posargs} - -[testenv:pep8] -deps = pep8 -commands = pep8 --repeat --show-source striker tests - -[testenv:cover] -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt -commands = nosetests -v --with-coverage --cover-package=striker \ - --cover-branches --cover-html --cover-html-dir=cov_html \ - {posargs} - -[testenv:shell] -deps = -r{toxinidir}/requirements.txt - -r{toxinidir}/test-requirements.txt -commands = {posargs}