Move Orchestration methods into Proxy
- Switch cloud.orchestration methods to consume the proxy directly.
- Add additional attributes to stack.
- Add the "query parameter" resolve_outputs to stack and extend the general _find/fetch methods to be able to consume additional attributes (normally through overloading).

Change-Id: I8c4cb174d8b2d7802e2e94ec907fdebfcdcc9859
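A rough usage sketch of the reworked call path (assumptions: a Connection obtained via openstack.connect(), a cloud entry named 'devstack' in clouds.yaml, and a local Heat template 'hello_world.yaml'; these names are illustrative, not part of the change):

import openstack

conn = openstack.connect(cloud='devstack')

# The cloud-layer call now delegates to conn.orchestration (the proxy)
# instead of issuing raw REST calls against the Heat endpoint.
conn.create_stack(
    name='demo',
    template_file='hello_world.yaml',
    rollback=False,
    wait=True,
    timeout=600,
)

# resolve_outputs is forwarded from the cloud layer down to the resource
# layer; False skips output resolution when fetching the stack.
stack = conn.get_stack('demo', resolve_outputs=False)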
This commit is contained in:
parent c7e3f3e7c6
commit 16afacbc61
@@ -20,6 +20,8 @@ import datetime
 import munch
 import six
 
+from openstack import resource
+
 _IMAGE_FIELDS = (
     'checksum',
     'container_format',
@@ -1082,6 +1084,9 @@ class Normalizer(object):
 
     def _normalize_stack(self, stack):
        """Normalize Heat Stack"""
+        if isinstance(stack, resource.Resource):
+            stack = stack.to_dict(ignore_none=True, original_names=True)
+        else:
            stack = stack.copy()
 
        # Discard noise
@@ -1092,7 +1097,10 @@
        stack.pop('status', None)
        stack.pop('identifier', None)
 
-        stack_status = stack.pop('stack_status')
+        stack_status = None
+
+        stack_status = stack.pop('stack_status', None) or \
+            stack.pop('status', None)
        (action, status) = stack_status.split('_', 1)
 
        ret = munch.Munch(
@@ -1121,13 +1129,13 @@
                ('tempate_description', 'template_description'),
                ('timeout_mins', 'timeout_mins'),
                ('tags', 'tags')):
-            value = stack.pop(old_name, None)
+            value = stack.get(old_name, None)
            ret[new_name] = value
            if not self.strict_mode:
                ret[old_name] = value
        ret['identifier'] = '{name}/{id}'.format(
            name=ret['name'], id=ret['id'])
-        ret['properties'] = stack
+        # ret['properties'] = stack
        return ret
 
    def _normalize_machines(self, machines):
@@ -16,8 +16,7 @@
 import types  # noqa
 
 from openstack.cloud import exc
-from openstack.cloud._heat import event_utils
-from openstack.cloud._heat import template_utils
+from openstack.orchestration.util import event_utils
 from openstack.cloud import _normalize
 from openstack.cloud import _utils
 
@@ -43,13 +42,9 @@ class OrchestrationCloudMixin(_normalize.Normalizer):
     def get_template_contents(
             self, template_file=None, template_url=None,
             template_object=None, files=None):
-        try:
-            return template_utils.get_template_contents(
-                template_file=template_file, template_url=template_url,
-                template_object=template_object, files=files)
-        except Exception as e:
-            raise exc.OpenStackCloudException(
-                "Error in processing template files: %s" % str(e))
+        return self.orchestration.get_template_contents(
+            template_file=template_file, template_url=template_url,
+            template_object=template_object, files=files)
 
     def create_stack(
             self, name, tags=None,
@@ -83,24 +78,18 @@ class OrchestrationCloudMixin(_normalize.Normalizer):
        :raises: ``OpenStackCloudException`` if something goes wrong during
            the OpenStack API call
        """
-        envfiles, env = template_utils.process_multiple_environments_and_files(
-            env_paths=environment_files)
-        tpl_files, template = template_utils.get_template_contents(
-            template_file=template_file,
-            template_url=template_url,
-            template_object=template_object,
-            files=files)
        params = dict(
-            stack_name=name,
            tags=tags,
-            disable_rollback=not rollback,
-            parameters=parameters,
-            template=template,
-            files=dict(list(tpl_files.items()) + list(envfiles.items())),
-            environment=env,
+            is_rollback_disabled=not rollback,
            timeout_mins=timeout // 60,
+            parameters=parameters
        )
-        self._orchestration_client.post('/stacks', json=params)
+        params.update(self.orchestration.read_env_and_templates(
+            template_file=template_file, template_url=template_url,
+            template_object=template_object, files=files,
+            environment_files=environment_files
+        ))
+        self.orchestration.create_stack(name=name, **params)
        if wait:
            event_utils.poll_for_events(self, stack_name=name,
                                        action='CREATE')
@@ -137,30 +126,26 @@ class OrchestrationCloudMixin(_normalize.Normalizer):
        :raises: ``OpenStackCloudException`` if something goes wrong during
            the OpenStack API calls
        """
-        envfiles, env = template_utils.process_multiple_environments_and_files(
-            env_paths=environment_files)
-        tpl_files, template = template_utils.get_template_contents(
-            template_file=template_file,
-            template_url=template_url,
-            template_object=template_object,
-            files=files)
        params = dict(
-            disable_rollback=not rollback,
            tags=tags,
-            parameters=parameters,
-            template=template,
-            files=dict(list(tpl_files.items()) + list(envfiles.items())),
-            environment=env,
+            is_rollback_disabled=not rollback,
            timeout_mins=timeout // 60,
+            parameters=parameters
        )
+        params.update(self.orchestration.read_env_and_templates(
+            template_file=template_file, template_url=template_url,
+            template_object=template_object, files=files,
+            environment_files=environment_files
+        ))
        if wait:
            # find the last event to use as the marker
            events = event_utils.get_events(
                self, name_or_id, event_args={'sort_dir': 'desc', 'limit': 1})
            marker = events[0].id if events else None
 
-        self._orchestration_client.put(
-            '/stacks/{name_or_id}'.format(name_or_id=name_or_id), json=params)
+        # Not to cause update of ID field pass stack as dict
+        self.orchestration.update_stack(stack={'id': name_or_id}, **params)
 
        if wait:
            event_utils.poll_for_events(self,
                                        name_or_id,
@@ -190,8 +175,7 @@ class OrchestrationCloudMixin(_normalize.Normalizer):
                self, name_or_id, event_args={'sort_dir': 'desc', 'limit': 1})
            marker = events[0].id if events else None
 
-        self._orchestration_client.delete(
-            '/stacks/{id}'.format(id=stack['id']))
+        self.orchestration.delete_stack(stack)
 
        if wait:
            try:
@@ -233,10 +217,8 @@ class OrchestrationCloudMixin(_normalize.Normalizer):
        :raises: ``OpenStackCloudException`` if something goes wrong during the
            OpenStack API call.
        """
-        data = self._orchestration_client.get(
-            '/stacks', error_message="Error fetching stack list")
-        return self._normalize_stacks(
-            self._get_and_munchify('stacks', data))
+        data = self.orchestration.stacks()
+        return self._normalize_stacks(data)
 
    def get_stack(self, name_or_id, filters=None, resolve_outputs=True):
        """Get exactly one stack.
@@ -257,15 +239,11 @@ class OrchestrationCloudMixin(_normalize.Normalizer):
            # stack names are mandatory and enforced unique in the project
            # so a StackGet can always be used for name or ID.
            try:
-                url = '/stacks/{name_or_id}'.format(name_or_id=name_or_id)
-                if not resolve_outputs:
-                    url = '{url}?resolve_outputs=False'.format(url=url)
-                data = self._orchestration_client.get(
-                    url,
-                    error_message="Error fetching stack")
-                stack = self._get_and_munchify('stack', data)
-                # Treat DELETE_COMPLETE stacks as a NotFound
-                if stack['stack_status'] == 'DELETE_COMPLETE':
+                stack = self.orchestration.find_stack(
+                    name_or_id,
+                    ignore_missing=False,
+                    resolve_outputs=resolve_outputs)
+                if stack.status == 'DELETE_COMPLETE':
                    return []
            except exc.OpenStackCloudURINotFound:
                return []
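For orientation, the other mixin methods now reduce to thin proxy calls roughly like the following (a sketch that assumes the `conn` connection from the earlier example; the stack identifier is illustrative):

# Listing and deleting go through the proxy resources as well.
for s in conn.orchestration.stacks():
    print(s.name, s.status)

conn.orchestration.delete_stack('demo-stack-id')

# update_stack() receives the stack as a plain dict so that the ID supplied
# by the caller is not overwritten by the body Heat returns (see the comment
# in the diff above).
conn.orchestration.update_stack(
    stack={'id': 'demo-stack-id'},
    is_rollback_disabled=True,
    timeout_mins=10,
    parameters={},
)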
@@ -12,7 +12,7 @@
 
 import yaml
 
-from openstack.cloud._heat import template_format
+from openstack.orchestration.util import template_format
 
 
 SECTIONS = (
@@ -18,10 +18,10 @@ import six
 from six.moves.urllib import parse
 from six.moves.urllib import request
 
-from openstack.cloud._heat import environment_format
-from openstack.cloud._heat import template_format
-from openstack.cloud._heat import utils
-from openstack.cloud import exc
+from openstack.orchestration.util import environment_format
+from openstack.orchestration.util import template_format
+from openstack.orchestration.util import utils
+from openstack import exceptions
 
 
 def get_template_contents(template_file=None, template_url=None,
@@ -46,12 +46,12 @@ def get_template_contents(template_file=None, template_url=None,
     elif existing:
         return {}, None
     else:
-        raise exc.OpenStackCloudException(
+        raise exceptions.SDKException(
            'Must provide one of template_file,'
            ' template_url or template_object')
 
    if not tpl:
-        raise exc.OpenStackCloudException(
+        raise exceptions.SDKException(
            'Could not fetch template from %s' % template_url)
 
    try:
@@ -59,7 +59,7 @@ def get_template_contents(template_file=None, template_url=None,
            tpl = tpl.decode('utf-8')
        template = template_format.parse(tpl)
    except ValueError as e:
-        raise exc.OpenStackCloudException(
+        raise exceptions.SDKException(
            'Error parsing template %(url)s %(error)s' %
            {'url': template_url, 'error': e})
 
@@ -20,7 +20,7 @@ from six.moves.urllib import error
 from six.moves.urllib import parse
 from six.moves.urllib import request
 
-from openstack.cloud import exc
+from openstack import exceptions
 
 
 def base_url_for_url(url):
@@ -41,7 +41,7 @@ def read_url_content(url):
        # TODO(mordred) Use requests
        content = request.urlopen(url).read()
    except error.URLError:
-        raise exc.OpenStackCloudException(
+        raise exceptions.SDKException(
            'Could not fetch contents for %s' % url)
 
    if content:
@@ -10,7 +10,6 @@
 # License for the specific language governing permissions and limitations
 # under the License.
 
-from openstack import exceptions
 from openstack.orchestration.v1 import resource as _resource
 from openstack.orchestration.v1 import software_config as _sc
 from openstack.orchestration.v1 import software_deployment as _sd
@@ -19,12 +18,51 @@ from openstack.orchestration.v1 import stack_environment as _stack_environment
 from openstack.orchestration.v1 import stack_files as _stack_files
 from openstack.orchestration.v1 import stack_template as _stack_template
 from openstack.orchestration.v1 import template as _template
+from openstack.orchestration.util import template_utils
+from openstack import exceptions
 from openstack import proxy
 from openstack import resource
 
 
 class Proxy(proxy.Proxy):
 
+    def read_env_and_templates(self, template_file=None, template_url=None,
+                               template_object=None, files=None,
+                               environment_files=None):
+        """Read templates and environment content and prepares
+        corresponding stack attributes
+
+        :param string template_file: Path to the template.
+        :param string template_url: URL of template.
+        :param string template_object: URL to retrieve template object.
+        :param dict files: dict of additional file content to include.
+        :param environment_files: Paths to environment files to apply.
+
+        :returns: Attributes dict to be set on the
+            :class:`~openstack.orchestration.v1.stack.Stack`
+        :rtype: dict
+        """
+        stack_attrs = dict()
+        envfiles = None
+        tpl_files = None
+        if environment_files:
+            envfiles, env = \
+                template_utils.process_multiple_environments_and_files(
+                    env_paths=environment_files)
+            stack_attrs['environment'] = env
+        if template_file or template_url or template_object:
+            tpl_files, template = template_utils.get_template_contents(
+                template_file=template_file,
+                template_url=template_url,
+                template_object=template_object,
+                files=files)
+            stack_attrs['template'] = template
+        if tpl_files or envfiles:
+            stack_attrs['files'] = dict(
+                list(tpl_files.items()) + list(envfiles.items())
+            )
+        return stack_attrs
+
     def create_stack(self, preview=False, **attrs):
         """Create a new stack from attributes
 
@@ -38,10 +76,12 @@ class Proxy(proxy.Proxy):
        :returns: The results of stack creation
        :rtype: :class:`~openstack.orchestration.v1.stack.Stack`
        """
 
        base_path = None if not preview else '/stacks/preview'
        return self._create(_stack.Stack, base_path=base_path, **attrs)
 
-    def find_stack(self, name_or_id, ignore_missing=True):
+    def find_stack(self, name_or_id,
+                   ignore_missing=True, resolve_outputs=True):
        """Find a single stack
 
        :param name_or_id: The name or ID of a stack.
@@ -53,7 +93,8 @@ class Proxy(proxy.Proxy):
        :returns: One :class:`~openstack.orchestration.v1.stack.Stack` or None
        """
        return self._find(_stack.Stack, name_or_id,
-                          ignore_missing=ignore_missing)
+                          ignore_missing=ignore_missing,
+                          resolve_outputs=resolve_outputs)
 
    def stacks(self, **query):
        """Return a generator of stacks
@@ -66,17 +107,18 @@ class Proxy(proxy.Proxy):
        """
        return self._list(_stack.Stack, **query)
 
-    def get_stack(self, stack):
+    def get_stack(self, stack, resolve_outputs=True):
        """Get a single stack
 
        :param stack: The value can be the ID of a stack or a
            :class:`~openstack.orchestration.v1.stack.Stack` instance.
+        :param resolve_outputs: Whether stack should contain outputs resolved.
 
        :returns: One :class:`~openstack.orchestration.v1.stack.Stack`
        :raises: :class:`~openstack.exceptions.ResourceNotFound`
            when no resource can be found.
        """
-        return self._get(_stack.Stack, stack)
+        return self._get(_stack.Stack, stack, resolve_outputs=resolve_outputs)
 
    def update_stack(self, stack, preview=False, **attrs):
        """Update a stack
@@ -411,3 +453,14 @@ class Proxy(proxy.Proxy):
            to delete failed to occur in the specified seconds.
        """
        return resource.wait_for_delete(self, res, interval, wait)
+
+    def get_template_contents(
+            self, template_file=None, template_url=None,
+            template_object=None, files=None):
+        try:
+            return template_utils.get_template_contents(
+                template_file=template_file, template_url=template_url,
+                template_object=template_object, files=files)
+        except Exception as e:
+            raise exceptions.SDKException(
+                "Error in processing template files: %s" % str(e))
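Taken together, the proxy additions can be exercised directly along these lines (a sketch assuming `conn` as before; the template and environment file names are made up):

# Build the template/environment related attributes once...
attrs = conn.orchestration.read_env_and_templates(
    template_file='hello_world.yaml',
    environment_files=['env.yaml'],
)

# ...and feed them into stack creation together with the renamed
# is_rollback_disabled/timeout_mins attributes.
stack = conn.orchestration.create_stack(
    name='demo',
    is_rollback_disabled=True,
    timeout_mins=10,
    parameters={},
    **attrs
)

# find_stack()/get_stack() now accept the resolve_outputs flag and pass it
# through to the underlying GET.
stack = conn.orchestration.find_stack('demo', resolve_outputs=False)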
@@ -28,6 +28,10 @@ class Stack(resource.Resource):
     allow_commit = True
     allow_delete = True
 
+    _query_mapping = resource.QueryParameters(
+        'resolve_outputs'
+    )
+
     # Properties
     #: A list of resource objects that will be added if a stack update
     #  is performed.
@@ -42,6 +46,17 @@ class Stack(resource.Resource):
     #: A list of resource objects that will be deleted if a stack
     #: update is performed.
     deleted = resource.Body('deleted', type=list)
+    #: Timestamp of the stack deletion.
+    deleted_at = resource.Body('deletion_time')
+    #: A JSON environment for the stack.
+    environment = resource.Body('environment')
+    #: An ordered list of names for environment files found in the files dict.
+    environment_files = resource.Body('environment_files', type=list)
+    #: Additional files referenced in the template or the environment
+    files = resource.Body('files', type=dict)
+    #: Name of the container in swift that has child
+    #: templates and environment files.
+    files_container = resource.Body('files_container')
     #: Whether the stack will support a rollback operation on stack
     #: create/update failures. *Type: bool*
     is_rollback_disabled = resource.Body('disable_rollback', type=bool)
@@ -105,9 +120,20 @@ class Stack(resource.Resource):
     def update(self, session, preview=False):
         # This overrides the default behavior of resource update because
         # we need to use other endpoint for update preview.
+        base_path = None
+        if self.name and self.id:
+            base_path = '/stacks/%(stack_name)s/%(stack_id)s' % {
+                'stack_name': self.name,
+                'stack_id': self.id}
+        elif self.name or self.id:
+            # We have only one of name/id. Do not try to build a stacks/NAME/ID
+            # path
+            base_path = '/stacks/%(stack_identity)s' % {
+                'stack_identity': self.name or self.id}
         request = self._prepare_request(
             prepend_key=False,
-            base_path='/stacks/%(stack_name)s/' % {'stack_name': self.name})
+            requires_id=False,
+            base_path=base_path)
 
         microversion = self._get_microversion_for(session, 'commit')
 
@@ -139,16 +165,77 @@ class Stack(resource.Resource):
         return resp.json()
 
     def fetch(self, session, requires_id=True,
-              base_path=None, error_message=None):
-        stk = super(Stack, self).fetch(
-            session,
-            requires_id=requires_id,
-            base_path=base_path,
-            error_message=error_message)
-        if stk and stk.status in ['DELETE_COMPLETE', 'ADOPT_COMPLETE']:
+              base_path=None, error_message=None, resolve_outputs=True):
+        if not self.allow_fetch:
+            raise exceptions.MethodNotSupported(self, "fetch")
+        request = self._prepare_request(requires_id=requires_id,
+                                        base_path=base_path)
+        # session = self._get_session(session)
+        microversion = self._get_microversion_for(session, 'fetch')
+
+        # NOTE(gtema): would be nice to simply use QueryParameters, however
+        # Heat return 302 with parameters being set into URL and requests
+        # apply parameters again, what results in them being set doubled
+        if not resolve_outputs:
+            request.url = request.url + '?resolve_outputs=False'
+        response = session.get(request.url, microversion=microversion)
+        kwargs = {}
+        if error_message:
+            kwargs['error_message'] = error_message
+
+        self.microversion = microversion
+        self._translate_response(response, **kwargs)
+
+        if self and self.status in ['DELETE_COMPLETE', 'ADOPT_COMPLETE']:
             raise exceptions.ResourceNotFound(
-                "No stack found for %s" % stk.id)
-        return stk
+                "No stack found for %s" % self.id)
+        return self
+
+    @classmethod
+    def find(cls, session, name_or_id, ignore_missing=True, **params):
+        """Find a resource by its name or id.
+
+        :param session: The session to use for making this request.
+        :type session: :class:`~keystoneauth1.adapter.Adapter`
+        :param name_or_id: This resource's identifier, if needed by
+                           the request. The default is ``None``.
+        :param bool ignore_missing: When set to ``False``
+                    :class:`~openstack.exceptions.ResourceNotFound` will be
+                    raised when the resource does not exist.
+                    When set to ``True``, None will be returned when
+                    attempting to find a nonexistent resource.
+        :param dict params: Any additional parameters to be passed into
+                            underlying methods, such as to
+                            :meth:`~openstack.resource.Resource.existing`
+                            in order to pass on URI parameters.
+
+        :return: The :class:`Resource` object matching the given name or id
+                 or None if nothing matches.
+        :raises: :class:`openstack.exceptions.DuplicateResource` if more
+                 than one resource is found for this request.
+        :raises: :class:`openstack.exceptions.ResourceNotFound` if nothing
+                 is found and ignore_missing is ``False``.
+        """
+        session = cls._get_session(session)
+        # Try to short-circuit by looking directly for a matching ID.
+        try:
+            match = cls.existing(
+                id=name_or_id,
+                connection=session._get_connection(),
+                **params)
+            return match.fetch(session, **params)
+        except exceptions.NotFoundException:
+            pass
+
+        # NOTE(gtema) we do not do list, since previous call has done this
+        # for us already
+
+        if ignore_missing:
+            return None
+        raise exceptions.ResourceNotFound(
+            "No %s found for %s" % (cls.__name__, name_or_id))
 
 
 StackPreview = Stack
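The unit test further down exercises the new fetch() path; in short, with a session `sess` (mocked in that test) the behaviour added above looks like this (illustrative only; the ID is made up):

from openstack.orchestration.v1 import stack

sot = stack.Stack(id='1234')

# Plain fetch: GET stacks/1234
sot.fetch(sess)

# resolve_outputs=False is appended to the URL by hand because, per the
# NOTE in the diff, Heat answers with a 302 when it arrives as a regular
# query parameter and requests would then apply it twice.
sot.fetch(sess, resolve_outputs=False)   # GET stacks/1234?resolve_outputs=False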
@@ -971,6 +971,7 @@ class Resource(dict):
         has_body = self.has_body
         exceptions.raise_from_response(response, error_message=error_message)
         if has_body:
+            try:
                 body = response.json()
                 if self.resource_key and self.resource_key in body:
                     body = body[self.resource_key]
@@ -981,6 +982,10 @@ class Resource(dict):
                 if self.commit_jsonpatch or self.allow_patch:
                     # We need the original body to compare against
                     self._original_body = body.copy()
+            except ValueError:
+                # Server returned not parse-able response (202, 204, etc)
+                # Do simply nothing
+                pass
 
         headers = self._consume_header_attrs(response.headers)
         self._header.attributes.update(headers)
@@ -1127,7 +1132,7 @@ class Resource(dict):
         return self
 
     def fetch(self, session, requires_id=True,
-              base_path=None, error_message=None):
+              base_path=None, error_message=None, **params):
        """Get a remote resource based on this instance.
 
        :param session: The session to use for making this request.
@@ -1139,6 +1144,7 @@ class Resource(dict):
            :data:`~openstack.resource.Resource.base_path`.
        :param str error_message: An Error message to be returned if
            requested object does not exist.
+        :param dict params: Additional parameters that can be consumed.
        :return: This :class:`Resource` instance.
        :raises: :exc:`~openstack.exceptions.MethodNotSupported` if
            :data:`Resource.allow_fetch` is not set to ``True``.
@@ -1577,7 +1583,7 @@ class Resource(dict):
                id=name_or_id,
                connection=session._get_connection(),
                **params)
-            return match.fetch(session)
+            return match.fetch(session, **params)
        except exceptions.NotFoundException:
            pass
 
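The base-class change is what lets a subclass forward extra keywords from find() through fetch(); a tiny stand-alone illustration of that pass-through pattern (not the actual openstack.resource implementation):

class Base:
    # Illustrative stand-in for openstack.resource.Resource.
    def find(self, name_or_id, **params):
        # Extra keywords travel on to fetch() untouched.
        return self.fetch(name_or_id, **params)

    def fetch(self, name_or_id, resolve_outputs=True, **params):
        url = name_or_id
        if not resolve_outputs:
            url += '?resolve_outputs=False'
        return url


print(Base().find('demo', resolve_outputs=False))
# -> demo?resolve_outputs=False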
@@ -21,7 +21,7 @@ import datetime
 import json
 import uuid
 
-from openstack.cloud._heat import template_format
+from openstack.orchestration.util import template_format
 from openstack.cloud import meta
 
 PROJECT_ID = '1c36b64c840a42cd9e9b931a369337f0'
@@ -15,10 +15,11 @@ import tempfile
 import testtools
 
 import openstack.cloud
-from openstack.cloud import meta
 from openstack.tests import fakes
 from openstack.tests.unit import base
 
+from openstack.orchestration.v1 import stack
+
 
 class TestStack(base.TestCase):
 
@@ -44,7 +45,8 @@ class TestStack(base.TestCase):
         ])
         stacks = self.cloud.list_stacks()
         self.assertEqual(
-            [f.toDict() for f in self.cloud._normalize_stacks(fake_stacks)],
+            [f.toDict() for f in self.cloud._normalize_stacks(
+                stack.Stack(**st) for st in fake_stacks)],
             [f.toDict() for f in stacks])
 
         self.assert_calls()
@@ -76,7 +78,8 @@ class TestStack(base.TestCase):
         ])
         stacks = self.cloud.search_stacks()
         self.assertEqual(
-            self.cloud._normalize_stacks(meta.obj_list_to_munch(fake_stacks)),
+            self.cloud._normalize_stacks(
+                stack.Stack(**st) for st in fake_stacks),
             stacks)
         self.assert_calls()
 
@@ -98,7 +101,7 @@ class TestStack(base.TestCase):
         stacks = self.cloud.search_stacks(filters=filters)
         self.assertEqual(
             self.cloud._normalize_stacks(
-                meta.obj_list_to_munch(fake_stacks[1:])),
+                stack.Stack(**st) for st in fake_stacks[1:]),
             stacks)
         self.assert_calls()
 
@@ -316,8 +319,6 @@ class TestStack(base.TestCase):
                 validate=dict(
                     json={
                         'disable_rollback': False,
-                        'environment': {},
-                        'files': {},
                         'parameters': {},
                         'stack_name': self.stack_name,
                         'tags': self.stack_tag,
@@ -364,8 +365,6 @@ class TestStack(base.TestCase):
                 validate=dict(
                     json={
                         'disable_rollback': False,
-                        'environment': {},
-                        'files': {},
                         'parameters': {},
                         'stack_name': self.stack_name,
                         'tags': self.stack_tag,
@@ -422,12 +421,11 @@ class TestStack(base.TestCase):
                 validate=dict(
                     json={
                         'disable_rollback': False,
-                        'environment': {},
-                        'files': {},
                         'parameters': {},
                         'tags': self.stack_tag,
                         'template': fakes.FAKE_TEMPLATE_CONTENT,
-                        'timeout_mins': 60})),
+                        'timeout_mins': 60}),
+                json={}),
             dict(
                 method='GET',
                 uri='{endpoint}/stacks/{name}'.format(
@@ -478,12 +476,11 @@ class TestStack(base.TestCase):
                 validate=dict(
                     json={
                         'disable_rollback': False,
-                        'environment': {},
-                        'files': {},
                         'parameters': {},
                         'tags': self.stack_tag,
                         'template': fakes.FAKE_TEMPLATE_CONTENT,
-                        'timeout_mins': 60})),
+                        'timeout_mins': 60}),
+                json={}),
             dict(
                 method='GET',
                 uri='{endpoint}/stacks/{name}/events?{qs}'.format(
@@ -40,13 +40,40 @@ class TestOrchestrationProxy(test_proxy_base.TestProxyBase):
                       method_kwargs=method_kwargs)
 
     def test_find_stack(self):
-        self.verify_find(self.proxy.find_stack, stack.Stack)
+        self.verify_find(self.proxy.find_stack, stack.Stack,
+                         expected_kwargs={'resolve_outputs': True})
+        # mock_method="openstack.proxy.Proxy._find"
+        # test_method=self.proxy.find_stack
+        # method_kwargs = {
+        #     'resolve_outputs': False,
+        #     'ignore_missing': False
+        # }
+        # method_args=["name_or_id"]
+        # self._verify2(mock_method, test_method,
+        #               method_args=method_args,
+        #               method_kwargs=method_kwargs,
+        #               expected_args=[stack.Stack, "name_or_id"],
+        #               expected_kwargs=method_kwargs,
+        #               expected_result="result")
+        #
+        # method_kwargs = {
+        #     'resolve_outputs': True,
+        #     'ignore_missing': True
+        # }
+        # self._verify2(mock_method, test_method,
+        #               method_args=method_args,
+        #               method_kwargs=method_kwargs,
+        #               expected_args=[stack.Stack, "name_or_id"],
+        #               expected_kwargs=method_kwargs,
+        #               expected_result="result")
 
     def test_stacks(self):
         self.verify_list(self.proxy.stacks, stack.Stack)
 
     def test_get_stack(self):
-        self.verify_get(self.proxy.get_stack, stack.Stack)
+        self.verify_get(self.proxy.get_stack, stack.Stack,
+                        method_kwargs={'resolve_outputs': False},
+                        expected_kwargs={'resolve_outputs': False})
         self.verify_get_overrided(
             self.proxy, stack.Stack,
             'openstack.orchestration.v1.stack.Stack')
@@ -13,6 +13,7 @@
 import mock
 import six
 from openstack.tests.unit import base
+from openstack.tests.unit import test_resource
 
 from openstack import exceptions
 from openstack.orchestration.v1 import stack
@@ -24,8 +25,13 @@ FAKE_NAME = 'test_stack'
 FAKE = {
     'capabilities': '1',
     'creation_time': '2015-03-09T12:15:57.233772',
+    'deletion_time': '2015-03-09T12:15:57.233772',
     'description': '3',
     'disable_rollback': True,
+    'environment': {'var1': 'val1'},
+    'environment_files': [],
+    'files': {'file1': 'content'},
+    'files_container': 'dummy_container',
     'id': FAKE_ID,
     'links': [{
         'href': 'stacks/%s/%s' % (FAKE_NAME, FAKE_ID),
@@ -135,7 +141,12 @@ class TestStack(base.TestCase):
         sot = stack.Stack(**FAKE)
         self.assertEqual(FAKE['capabilities'], sot.capabilities)
         self.assertEqual(FAKE['creation_time'], sot.created_at)
+        self.assertEqual(FAKE['deletion_time'], sot.deleted_at)
         self.assertEqual(FAKE['description'], sot.description)
+        self.assertEqual(FAKE['environment'], sot.environment)
+        self.assertEqual(FAKE['environment_files'], sot.environment_files)
+        self.assertEqual(FAKE['files'], sot.files)
+        self.assertEqual(FAKE['files_container'], sot.files_container)
         self.assertTrue(sot.is_rollback_disabled)
         self.assertEqual(FAKE['id'], sot.id)
         self.assertEqual(FAKE['links'], sot.links)
@@ -186,19 +197,31 @@ class TestStack(base.TestCase):
 
         sot._action.assert_called_with(sess, body)
 
-    @mock.patch.object(resource.Resource, 'fetch')
-    def test_fetch(self, mock_fetch):
+    def test_fetch(self):
         sess = mock.Mock()
+        sess.default_microversion = None
         sot = stack.Stack(**FAKE)
-        deleted_stack = mock.Mock(id=FAKE_ID, status='DELETE_COMPLETE')
-        normal_stack = mock.Mock(status='CREATE_COMPLETE')
-        mock_fetch.side_effect = [
-            normal_stack,
+        sess.get = mock.Mock()
+        sess.get.side_effect = [
+            test_resource.FakeResponse(
+                {'stack': {'stack_status': 'CREATE_COMPLETE'}}, 200),
+            test_resource.FakeResponse(
+                {'stack': {'stack_status': 'CREATE_COMPLETE'}}, 200),
             exceptions.ResourceNotFound(message='oops'),
-            deleted_stack,
+            test_resource.FakeResponse(
+                {'stack': {'stack_status': 'DELETE_COMPLETE'}}, 200)
         ]
 
-        self.assertEqual(normal_stack, sot.fetch(sess))
+        self.assertEqual(sot, sot.fetch(sess))
+        sess.get.assert_called_with(
+            'stacks/{id}'.format(id=sot.id),
+            microversion=None)
+        sot.fetch(sess, resolve_outputs=False)
+        sess.get.assert_called_with(
+            'stacks/{id}?resolve_outputs=False'.format(id=sot.id),
+            microversion=None)
         ex = self.assertRaises(exceptions.ResourceNotFound, sot.fetch, sess)
         self.assertEqual('oops', six.text_type(ex))
         ex = self.assertRaises(exceptions.ResourceNotFound, sot.fetch, sess)
@@ -238,7 +261,7 @@ class TestStack(base.TestCase):
         sot.update(sess)
 
         sess.put.assert_called_with(
-            'stacks/%s/%s' % (FAKE_NAME, FAKE_ID),
+            '/stacks/%s/%s' % (FAKE_NAME, FAKE_ID),
             headers={},
             microversion=None,
             json=body
@@ -177,7 +177,7 @@ class TestProxyBase(base.TestCase):
                     mock_method="openstack.proxy.Proxy._find",
                     path_args=None, **kwargs):
         method_args = value or ["name_or_id"]
-        expected_kwargs = {}
+        expected_kwargs = kwargs.pop('expected_kwargs', {})
 
         self._add_path_args_for_verify(path_args, method_args, expected_kwargs,
                                        value=value)