Move dcmanager orchestration to a separate process
1) Remove DC manager orchestration from the dcmanager-manager process
2) Create the dcmanager-orchestrator process and associated files
3) Add new RPC calls for the dcmanager-orchestrator process to notify dcmanager
4) Create/update unit tests to verify the implementation changes

Story: 2007267
Task: 40734
Change-Id: Ibbbae77558a8a8fd95b636fa6c3aebb1dfefb514
Signed-off-by: Jessica Castelino <jessica.castelino@windriver.com>
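In short, strategy orchestration moves out of the dcmanager-manager process into a new dcmanager-orchestrator process listening on its own RPC topic. A minimal sketch of the resulting call path, using only names introduced by the diff below (the REST/WSGI plumbing is elided):

    # Sketch, not part of the commit: how API-side code reaches the new
    # process. ManagerOrchestratorClient is added in
    # distributedcloud/dcmanager/orchestrator/rpcapi.py below.
    from dcmanager.orchestrator import rpcapi as orch_rpc_client

    client = orch_rpc_client.ManagerOrchestratorClient()
    # Travels over the "dcmanager-orchestrator" topic and is handled by
    # DCManagerOrchestratorService.create_sw_update_strategy().
    strategy = client.create_sw_update_strategy(context, payload)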
parent 64caf6de7c
commit eb97f4c8b6
@@ -29,6 +29,7 @@ Source10: dcorch.conf
 Source11: dcdbsync.conf
 Source12: clean-dcorch
 Source13: dcmanager-audit.service
+Source14: dcmanager-orchestrator.service

 BuildArch: noarch

@@ -132,6 +133,7 @@ install -d -m 755 %{buildroot}%{_sysconfdir}/dcmanager/
 install -p -D -m 644 %{SOURCE1} %{buildroot}%{_unitdir}/dcmanager-api.service
 install -p -D -m 644 %{SOURCE2} %{buildroot}%{_unitdir}/dcmanager-manager.service
 install -p -D -m 644 %{SOURCE13} %{buildroot}%{_unitdir}/dcmanager-audit.service
+install -p -D -m 644 %{SOURCE14} %{buildroot}%{_unitdir}/dcmanager-orchestrator.service
 install -p -D -m 644 %{SOURCE9} %{buildroot}%{_tmpfilesdir}
 # install default config files
 cd %{_builddir}/%{pypi_name}-%{version} && oslo-config-generator --config-file ./dcmanager/config-generator.conf --output-file %{_builddir}/%{pypi_name}-%{version}%{_sysconfdir}/dcmanager/dcmanager.conf.sample
@@ -189,6 +191,8 @@ install -m 755 -D -p %{SOURCE12} %{buildroot}/%{_bindir}/clean-dcorch
 %{_unitdir}/dcmanager-api.service
 %{_bindir}/dcmanager-audit
 %{_unitdir}/dcmanager-audit.service
+%{_bindir}/dcmanager-orchestrator
+%{_unitdir}/dcmanager-orchestrator.service
 %{_bindir}/dcmanager-manager
 %{_unitdir}/dcmanager-manager.service
 %{_bindir}/dcmanager-manage
distributedcloud/centos/files/dcmanager-orchestrator.service (new file, 12 lines)
@@ -0,0 +1,12 @@
+[Unit]
+Description=DC Manager Orchestrator Service
+After=syslog-ng.service network-online.target dcmanager-manager.service
+
+[Service]
+Type=simple
+User=root
+ExecStart=/usr/bin/dcmanager-orchestrator --config-file /etc/dcmanager/dcmanager.conf
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target
@@ -33,7 +33,7 @@ from dcmanager.common import consts
 from dcmanager.common import exceptions
 from dcmanager.common.i18n import _
 from dcmanager.db import api as db_api
-from dcmanager.rpc import client as rpc_client
+from dcmanager.orchestrator import rpcapi as orch_rpc_client

 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
@@ -49,7 +49,7 @@ class SwUpdateStrategyController(object):

     def __init__(self):
         super(SwUpdateStrategyController, self).__init__()
-        self.rpc_client = rpc_client.ManagerClient()
+        self.orch_rpc_client = orch_rpc_client.ManagerOrchestratorClient()

     @expose(generic=True, template='json')
     def index(self):
@@ -172,8 +172,8 @@ class SwUpdateStrategyController(object):
         try:
             # Ask dcmanager-manager to create the strategy.
             # It will do all the real work...
-            return self.rpc_client.create_sw_update_strategy(context,
-                                                             payload)
+            return self.orch_rpc_client.create_sw_update_strategy(context,
+                                                                  payload)
         except RemoteError as e:
             pecan.abort(422, e.value)
         except Exception as e:
@@ -191,7 +191,7 @@ class SwUpdateStrategyController(object):
         try:
             # Ask dcmanager-manager to apply the strategy.
             # It will do all the real work...
-            return self.rpc_client.apply_sw_update_strategy(
+            return self.orch_rpc_client.apply_sw_update_strategy(
                 context,
                 update_type=update_type_filter)
         except RemoteError as e:
@@ -203,7 +203,7 @@ class SwUpdateStrategyController(object):
         try:
             # Ask dcmanager-manager to abort the strategy.
             # It will do all the real work...
-            return self.rpc_client.abort_sw_update_strategy(
+            return self.orch_rpc_client.abort_sw_update_strategy(
                 context,
                 update_type=update_type_filter)
         except RemoteError as e:
@@ -223,7 +223,7 @@ class SwUpdateStrategyController(object):
         try:
             # Ask dcmanager-manager to delete the strategy.
             # It will do all the real work...
-            return self.rpc_client.delete_sw_update_strategy(
+            return self.orch_rpc_client.delete_sw_update_strategy(
                 context,
                 update_type=update_type_filter)
         except RemoteError as e:
@@ -12,6 +12,14 @@ manager.py:
     start Manager service
     python manager.py --config-file=/etc/dcmanager.conf

+audit.py:
+    start Audit service
+    python audit.py --config-file=/etc/dcmanager.conf
+
+orchestrator.py:
+    start Orchestrator service
+    python orchestrator.py --config-file=/etc/dcmanager.conf
+
 manage.py:
     CLI interface for dcmanager database management
     dcmanager-manage --config-file /etc/dcmanager.conf db_sync
distributedcloud/dcmanager/cmd/orchestrator.py (new file, 62 lines)
@@ -0,0 +1,62 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# Copyright (c) 2020 Wind River Systems, Inc.
+#
+# The right to copy, distribute, modify, or otherwise make use
+# of this software may be licensed only pursuant to the terms
+# of an applicable Wind River license agreement.
+#
+
+"""
+DC Manager Orchestrator Service.
+"""
+
+import eventlet
+eventlet.monkey_patch()
+
+from oslo_config import cfg
+from oslo_i18n import _lazy
+from oslo_log import log as logging
+from oslo_service import service
+
+from dcmanager.common import config
+from dcmanager.common import messaging
+
+
+CONF = cfg.CONF
+LOG = logging.getLogger('dcmanager.orchestrator')
+
+
+def main():
+    _lazy.enable_lazy()
+    config.register_options()
+    config.register_keystone_options()
+    logging.register_options(CONF)
+    CONF(project='dcmanager', prog='dcmanager-orchestrator')
+    logging.setup(CONF, 'dcmanager-orchestrator')
+    logging.set_defaults()
+    messaging.setup()
+
+    from dcmanager.orchestrator import service as orchestrator
+
+    srv = orchestrator.DCManagerOrchestratorService()
+    launcher = service.launch(CONF,
+                              srv, workers=cfg.CONF.orch_workers)
+
+    LOG.info("Configuration:")
+    CONF.log_opt_values(LOG, logging.INFO)
+
+    launcher.wait()
+
+if __name__ == '__main__':
+    main()
@@ -145,6 +145,8 @@ scheduler_opts = [
 common_opts = [
     cfg.IntOpt('workers', default=1,
                help='number of workers'),
+    cfg.IntOpt('orch_workers', default=1,
+               help='number of orchestrator workers'),
     cfg.IntOpt('audit_workers', default=1,
                help='number of audit workers'),
     cfg.StrOpt('host',
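For reference, a hedged sketch of how the new option is consumed once common_opts is registered (this mirrors the use of cfg.CONF.orch_workers in dcmanager/cmd/orchestrator.py earlier in this diff):

    from oslo_config import cfg

    # Assumes dcmanager.common.config.register_options() has already
    # registered common_opts on the global CONF object.
    workers = cfg.CONF.orch_workers  # defaults to 1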
@@ -25,7 +25,10 @@ TOPIC_DC_MANAGER = "dcmanager"

 TOPIC_DC_MANAGER_AUDIT = "dcmanager-audit"

+TOPIC_DC_MANAGER_ORCHESTRATOR = "dcmanager-orchestrator"
+
 LOADS_VAULT_DIR = "/opt/dc-vault/loads"

 PATCH_VAULT_DIR = "/opt/dc-vault/patches"

 # Well known region names
@@ -36,7 +36,6 @@ from dcmanager.common.i18n import _
 from dcmanager.common import messaging as rpc_messaging
 from dcmanager.common import scheduler
 from dcmanager.manager.subcloud_manager import SubcloudManager
-from dcmanager.manager.sw_update_manager import SwUpdateManager

 CONF = cfg.CONF
 LOG = logging.getLogger(__name__)
@@ -78,7 +77,6 @@ class DCManagerService(service.Service):
         self.target = None
         self._rpc_server = None
         self.subcloud_manager = None
-        self.sw_update_manager = None
         self.audit_rpc_client = None

     def init_tgm(self):
@@ -86,10 +84,6 @@ class DCManagerService(service.Service):

     def init_managers(self):
         self.subcloud_manager = SubcloudManager()
-        self.sw_update_manager = SwUpdateManager()
-
-    def stop_managers(self):
-        self.sw_update_manager.stop()

     def start(self):
         self.dcmanager_id = uuidutils.generate_uuid()
@@ -205,38 +199,6 @@ class DCManagerService(service.Service):
         self.subcloud_manager.update_subcloud_sync_endpoint_type(
             context, subcloud_name, endpoint_type_list, openstack_installed)

-    @request_context
-    def create_sw_update_strategy(self, context, payload):
-        # Creates a software update strategy
-        LOG.info("Handling create_sw_update_strategy request of type %s" %
-                 payload.get('type'))
-        return self.sw_update_manager.create_sw_update_strategy(
-            context, payload)
-
-    @request_context
-    def delete_sw_update_strategy(self, context, update_type=None):
-        # Deletes the software update strategy
-        LOG.info("Handling delete_sw_update_strategy request")
-        return self.sw_update_manager.delete_sw_update_strategy(
-            context,
-            update_type=update_type)
-
-    @request_context
-    def apply_sw_update_strategy(self, context, update_type=None):
-        # Applies the software update strategy
-        LOG.info("Handling apply_sw_update_strategy request")
-        return self.sw_update_manager.apply_sw_update_strategy(
-            context,
-            update_type=update_type)
-
-    @request_context
-    def abort_sw_update_strategy(self, context, update_type=None):
-        # Aborts the software update strategy
-        LOG.info("Handling abort_sw_update_strategy request")
-        return self.sw_update_manager.abort_sw_update_strategy(
-            context,
-            update_type=update_type)
-
     def _stop_rpc_server(self):
         # Stop RPC connection to prevent new requests
         LOG.debug(_("Attempting to stop engine service..."))
@@ -252,7 +214,6 @@ class DCManagerService(service.Service):
         self._stop_rpc_server()

         self.TG.stop()
-        self.stop_managers()

         # Terminate the engine process
         LOG.info("All threads were gone, terminating engine")
@@ -30,13 +30,13 @@ from dcmanager.common import context
 from dcmanager.common import exceptions
 from dcmanager.common import scheduler
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.firmware.applying_vim_strategy \
+from dcmanager.orchestrator.states.firmware.applying_vim_strategy \
     import ApplyingVIMStrategyState
-from dcmanager.manager.states.firmware.creating_vim_strategy \
+from dcmanager.orchestrator.states.firmware.creating_vim_strategy \
    import CreatingVIMStrategyState
-from dcmanager.manager.states.firmware.finishing_fw_update \
+from dcmanager.orchestrator.states.firmware.finishing_fw_update \
     import FinishingFwUpdateState
-from dcmanager.manager.states.firmware.importing_firmware \
+from dcmanager.orchestrator.states.firmware.importing_firmware \
     import ImportingFirmwareState

 LOG = logging.getLogger(__name__)
distributedcloud/dcmanager/orchestrator/rpcapi.py (new file, 76 lines)
@@ -0,0 +1,76 @@
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+#
+# Copyright (c) 2020 Wind River Systems, Inc.
+#
+# The right to copy, distribute, modify, or otherwise make use
+# of this software may be licensed only pursuant to the terms
+# of an applicable Wind River license agreement.
+#
+
+"""
+Client side of the DC Manager Orchestrator RPC API.
+"""
+
+from dcmanager.common import consts
+from dcmanager.common import messaging
+
+
+class ManagerOrchestratorClient(object):
+    """Client side of the DC Manager Orchestrator RPC API.
+
+    Version History:
+        1.0 - Initial version
+    """
+
+    BASE_RPC_API_VERSION = '1.0'
+
+    def __init__(self):
+        self._client = messaging.get_rpc_client(
+            topic=consts.TOPIC_DC_MANAGER_ORCHESTRATOR,
+            version=self.BASE_RPC_API_VERSION)
+
+    @staticmethod
+    def make_msg(method, **kwargs):
+        return method, kwargs
+
+    def call(self, ctxt, msg, version=None):
+        method, kwargs = msg
+        if version is not None:
+            client = self._client.prepare(version=version)
+        else:
+            client = self._client
+        return client.call(ctxt, method, **kwargs)
+
+    def cast(self, ctxt, msg, version=None):
+        method, kwargs = msg
+        if version is not None:
+            client = self._client.prepare(version=version)
+        else:
+            client = self._client
+        return client.cast(ctxt, method, **kwargs)
+
+    def create_sw_update_strategy(self, ctxt, payload):
+        return self.call(ctxt, self.make_msg('create_sw_update_strategy',
+                                             payload=payload))
+
+    def delete_sw_update_strategy(self, ctxt, update_type=None):
+        return self.call(ctxt, self.make_msg('delete_sw_update_strategy',
+                                             update_type=update_type))
+
+    def apply_sw_update_strategy(self, ctxt, update_type=None):
+        return self.call(ctxt, self.make_msg('apply_sw_update_strategy',
+                                             update_type=update_type))
+
+    def abort_sw_update_strategy(self, ctxt, update_type=None):
+        return self.call(ctxt, self.make_msg('abort_sw_update_strategy',
+                                             update_type=update_type))
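A usage note on this client (mine, not part of the commit): call() blocks for a reply while cast() is fire-and-forget, and a per-call version can be pinned without rebuilding the client:

    # Assumes the transport is already configured via
    # dcmanager.common.messaging.setup() and ctxt is a valid RequestContext.
    client = ManagerOrchestratorClient()
    client.call(ctxt,
                client.make_msg('apply_sw_update_strategy', update_type=None),
                version='1.0')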
distributedcloud/dcmanager/orchestrator/service.py (new file, 135 lines)
@@ -0,0 +1,135 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Copyright (c) 2020 Wind River Systems, Inc.
+#
+# The right to copy, distribute, modify, or otherwise make use
+# of this software may be licensed only pursuant to the terms
+# of an applicable Wind River license agreement.
+#
+
+import six
+
+import functools
+from oslo_config import cfg
+from oslo_log import log as logging
+import oslo_messaging
+from oslo_service import service
+
+from dcmanager.common import consts
+from dcmanager.common import context
+from dcmanager.common import exceptions
+from dcmanager.common import messaging as rpc_messaging
+from dcmanager.common import scheduler
+from dcmanager.orchestrator.sw_update_manager import SwUpdateManager
+
+CONF = cfg.CONF
+LOG = logging.getLogger(__name__)
+
+
+def request_context(func):
+    @functools.wraps(func)
+    def wrapped(self, ctx, *args, **kwargs):
+        if ctx is not None and not isinstance(ctx, context.RequestContext):
+            ctx = context.RequestContext.from_dict(ctx.to_dict())
+        try:
+            return func(self, ctx, *args, **kwargs)
+        except exceptions.DCManagerException:
+            raise oslo_messaging.rpc.dispatcher.ExpectedException()
+
+    return wrapped
+
+
+class DCManagerOrchestratorService(service.Service):
+    """Lifecycle manager for a running orchestrator service."""
+
+    def __init__(self):
+
+        super(DCManagerOrchestratorService, self).__init__()
+        self.host = cfg.CONF.host
+        self.rpc_api_version = consts.RPC_API_VERSION
+        self.topic = consts.TOPIC_DC_MANAGER_ORCHESTRATOR
+        # The following are initialized here, but assigned in start() which
+        # happens after the fork when spawning multiple worker processes
+        self.TG = None
+        self.target = None
+        self._rpc_server = None
+        self.sw_update_manager = None
+
+    def start(self):
+        self.init_tgm()
+        self.init_manager()
+        target = oslo_messaging.Target(version=self.rpc_api_version,
+                                       server=self.host,
+                                       topic=self.topic)
+        self.target = target
+        self._rpc_server = rpc_messaging.get_rpc_server(self.target, self)
+        self._rpc_server.start()
+        super(DCManagerOrchestratorService, self).start()
+
+    def init_tgm(self):
+        self.TG = scheduler.ThreadGroupManager()
+
+    def init_manager(self):
+        self.sw_update_manager = SwUpdateManager()
+
+    def _stop_rpc_server(self):
+        # Stop RPC connection to prevent new requests
+        LOG.debug("Attempting to stop engine service...")
+        try:
+            self._rpc_server.stop()
+            self._rpc_server.wait()
+            LOG.info('Engine service stopped successfully')
+        except Exception as ex:
+            LOG.error('Failed to stop engine service: %s',
+                      six.text_type(ex))
+
+    def stop(self):
+        self._stop_rpc_server()
+        self.TG.stop()
+        self.sw_update_manager.stop()
+        # Terminate the engine process
+        LOG.info("All threads were gone, terminating engine")
+        super(DCManagerOrchestratorService, self).stop()
+
+    @request_context
+    def create_sw_update_strategy(self, context, payload):
+        # Creates a software update strategy
+        LOG.info("Handling create_sw_update_strategy request of type %s" %
+                 payload.get('type'))
+        return self.sw_update_manager.create_sw_update_strategy(
+            context, payload)
+
+    @request_context
+    def delete_sw_update_strategy(self, context, update_type=None):
+        # Deletes the software update strategy
+        LOG.info("Handling delete_sw_update_strategy request")
+        return self.sw_update_manager.delete_sw_update_strategy(
+            context,
+            update_type=update_type)
+
+    @request_context
+    def apply_sw_update_strategy(self, context, update_type=None):
+        # Applies the software update strategy
+        LOG.info("Handling apply_sw_update_strategy request")
+        return self.sw_update_manager.apply_sw_update_strategy(
+            context,
+            update_type=update_type)
+
+    @request_context
+    def abort_sw_update_strategy(self, context, update_type=None):
+        # Aborts the software update strategy
+        LOG.info("Handling abort_sw_update_strategy request")
+        return self.sw_update_manager.abort_sw_update_strategy(
+            context,
+            update_type=update_type)
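A note on the deferred initialization above (my reading, consistent with the comment in __init__): oslo.service forks worker processes before calling start(), so the thread group, manager, and RPC server must be created per worker rather than in the parent. Roughly:

    # Hedged sketch of the launch ordering (see dcmanager/cmd/orchestrator.py).
    from oslo_config import cfg
    from oslo_service import service as oslo_service

    srv = DCManagerOrchestratorService()   # no RPC server exists yet
    launcher = oslo_service.launch(cfg.CONF, srv,
                                   workers=cfg.CONF.orch_workers)
    # Each worker process runs srv.start(), creating its own
    # ThreadGroupManager, SwUpdateManager, and RPC server.
    launcher.wait()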
@@ -9,7 +9,7 @@ from dccommon.drivers.openstack import vim
 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState


 # Applying the vim update strategy may result in a loss of communication
@@ -9,7 +9,7 @@ from dccommon.drivers.openstack import vim
 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
 from dcmanager.common import utils as dcmanager_utils
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 # Max time: 30 minutes = 180 queries x 10 seconds between
 DEFAULT_MAX_QUERIES = 180
@@ -5,8 +5,8 @@
 #
 from dccommon.drivers.openstack import vim
 from dcmanager.common import consts
-from dcmanager.manager.states.base import BaseState
-from dcmanager.manager.states.firmware import utils
+from dcmanager.orchestrator.states.base import BaseState
+from dcmanager.orchestrator.states.firmware import utils
 from dcmanager.rpc import client as dcmanager_rpc_client
 from dcorch.common import consts as dcorch_consts

@@ -6,8 +6,8 @@
 import os

 from dcmanager.common import consts
-from dcmanager.manager.states.base import BaseState
-from dcmanager.manager.states.firmware import utils
+from dcmanager.orchestrator.states.base import BaseState
+from dcmanager.orchestrator.states.firmware import utils


 class ImportingFirmwareState(BaseState):
@@ -7,7 +7,7 @@ import time

 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 # Max time: 10 minutes = 60 queries x 10 seconds
 DEFAULT_MAX_QUERIES = 60
@@ -7,7 +7,7 @@ import time

 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState


 # When an unlock occurs, a reboot is triggered. During reboot, API calls fail.
@@ -7,7 +7,7 @@ import time

 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState


 ACTIVATING_COMPLETED_STATES = ['activation-complete',
@@ -8,7 +8,7 @@ import time
 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState


 # Max time: 10 minutes = 60 queries x 10 seconds between each query
@@ -7,7 +7,7 @@ import time

 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState


 # Max time: 10 minutes = 60 queries x 10 seconds between each query
@@ -8,7 +8,7 @@ import time
 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
 from dcmanager.common import utils
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 # Max time: 30 minutes = 180 queries x 10 seconds between
 DEFAULT_MAX_QUERIES = 180
@@ -5,7 +5,7 @@
 #
 from dcmanager.common import consts
 from dcmanager.common import exceptions
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 # When a license is not installed, this will be part of the API error string
 LICENSE_FILE_NOT_FOUND_SUBSTRING = "License file not found"
@@ -11,7 +11,7 @@ import time
 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState


 ANSIBLE_UPGRADE_PLAYBOOK = \
@@ -11,7 +11,7 @@ from dcmanager.common import consts
 from dcmanager.common.exceptions import ManualRecoveryRequiredException
 from dcmanager.common.exceptions import PreCheckFailedException
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 # These deploy states should transition to the 'upgrading' state
 VALID_UPGRADE_STATES = [consts.DEPLOY_STATE_PRE_INSTALL_FAILED,
@@ -9,7 +9,7 @@ from dccommon.drivers.openstack.vim import ALARM_RESTRICTIONS_RELAXED
 from dcmanager.common import consts
 from dcmanager.common.exceptions import StrategyStoppedException
 from dcmanager.common import utils
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 DEFAULT_FORCE_FLAG = False
 # Max time 30 minutes = 180 attempts, with 10 seconds between
@@ -13,7 +13,7 @@ from dccommon.subcloud_install import SubcloudInstall
 from dcmanager.common import consts
 from dcmanager.common import utils
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.base import BaseState
+from dcmanager.orchestrator.states.base import BaseState

 from tsconfig.tsconfig import SW_VERSION

@@ -28,9 +28,9 @@ from dcmanager.common import consts
 from dcmanager.common import exceptions
 from dcmanager.common import manager
 from dcmanager.db import api as db_api
-from dcmanager.manager.fw_update_orch_thread import FwUpdateOrchThread
-from dcmanager.manager.patch_orch_thread import PatchOrchThread
-from dcmanager.manager.sw_upgrade_orch_thread import SwUpgradeOrchThread
+from dcmanager.orchestrator.fw_update_orch_thread import FwUpdateOrchThread
+from dcmanager.orchestrator.patch_orch_thread import PatchOrchThread
+from dcmanager.orchestrator.sw_upgrade_orch_thread import SwUpgradeOrchThread
 from dcorch.common import consts as dcorch_consts

 LOG = logging.getLogger(__name__)
@@ -30,20 +30,20 @@ from dcmanager.common import context
 from dcmanager.common import exceptions
 from dcmanager.common import scheduler
 from dcmanager.db import api as db_api
-from dcmanager.manager.states.lock_host import LockHostState
-from dcmanager.manager.states.unlock_host import UnlockHostState
-from dcmanager.manager.states.upgrade.activating import ActivatingUpgradeState
-from dcmanager.manager.states.upgrade.completing import CompletingUpgradeState
-from dcmanager.manager.states.upgrade.deleting_load import DeletingLoadState
-from dcmanager.manager.states.upgrade.importing_load import ImportingLoadState
-from dcmanager.manager.states.upgrade.installing_license \
+from dcmanager.orchestrator.states.lock_host import LockHostState
+from dcmanager.orchestrator.states.unlock_host import UnlockHostState
+from dcmanager.orchestrator.states.upgrade.activating import ActivatingUpgradeState
+from dcmanager.orchestrator.states.upgrade.completing import CompletingUpgradeState
+from dcmanager.orchestrator.states.upgrade.deleting_load import DeletingLoadState
+from dcmanager.orchestrator.states.upgrade.importing_load import ImportingLoadState
+from dcmanager.orchestrator.states.upgrade.installing_license \
     import InstallingLicenseState
-from dcmanager.manager.states.upgrade.migrating_data \
+from dcmanager.orchestrator.states.upgrade.migrating_data \
     import MigratingDataState
-from dcmanager.manager.states.upgrade.pre_check import PreCheckState
-from dcmanager.manager.states.upgrade.starting_upgrade \
+from dcmanager.orchestrator.states.upgrade.pre_check import PreCheckState
+from dcmanager.orchestrator.states.upgrade.starting_upgrade \
     import StartingUpgradeState
-from dcmanager.manager.states.upgrade.upgrading_simplex \
+from dcmanager.orchestrator.states.upgrade.upgrading_simplex \
     import UpgradingSimplexState

 LOG = logging.getLogger(__name__)
@@ -134,22 +134,6 @@ class ManagerClient(RPCClient):
             endpoint_type_list=endpoint_type_list,
             openstack_installed=openstack_installed))

-    def create_sw_update_strategy(self, ctxt, payload):
-        return self.call(ctxt, self.make_msg('create_sw_update_strategy',
-                                             payload=payload))
-
-    def delete_sw_update_strategy(self, ctxt, update_type=None):
-        return self.call(ctxt, self.make_msg('delete_sw_update_strategy',
-                                             update_type=update_type))
-
-    def apply_sw_update_strategy(self, ctxt, update_type=None):
-        return self.call(ctxt, self.make_msg('apply_sw_update_strategy',
-                                             update_type=update_type))
-
-    def abort_sw_update_strategy(self, ctxt, update_type=None):
-        return self.call(ctxt, self.make_msg('abort_sw_update_strategy',
-                                             update_type=update_type))
-

 class DCManagerNotifications(RPCClient):
     """DC Manager Notification interface to broadcast subcloud state changed
@@ -27,7 +27,7 @@ import webtest

 from dcmanager.api.controllers.v1 import sw_update_strategy
 from dcmanager.common import consts
-from dcmanager.rpc import client as rpc_client
+from dcmanager.orchestrator import rpcapi as rpc_client
 from dcmanager.tests.unit.api import test_root_controller as testroot
 from dcmanager.tests import utils

@@ -57,7 +57,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         super(TestSwUpdateStrategy, self).setUp()
         self.ctx = utils.dummy_context()

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update(self, mock_db_api, mock_rpc_client):
         data = FAKE_SW_UPDATE_DATA
@@ -70,7 +70,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                                       data)
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_with_force_option(self, mock_db_api, mock_rpc_client):
         data = copy.copy(FAKE_SW_UPDATE_DATA)
@@ -85,7 +85,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                                       data)
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_bad_type(self, mock_db_api, mock_rpc_client):
         data = copy.copy(FAKE_SW_UPDATE_DATA)
@@ -94,7 +94,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_bad_apply_type(self, mock_db_api, mock_rpc_client):
         data = copy.copy(FAKE_SW_UPDATE_DATA)
@@ -103,7 +103,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_bad_max_parallel(
             self, mock_db_api, mock_rpc_client):
@@ -113,7 +113,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_invalid_stop_on_failure_type(
             self, mock_db_api, mock_rpc_client):
@@ -123,7 +123,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_invalid_force_type(
             self, mock_db_api, mock_rpc_client):
@@ -133,7 +133,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_valid_force_type_missing_cloud_name(
             self, mock_db_api, mock_rpc_client):
@@ -143,14 +143,14 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     def test_post_no_body(self, mock_rpc_client):
         data = {}
         six.assertRaisesRegex(self, webtest.app.AppError, "400 *",
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     def test_post_no_type(self, mock_rpc_client):
         data = copy.copy(FAKE_SW_UPDATE_DATA)
         del data['type']
@@ -158,7 +158,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_apply(self, mock_db_api, mock_rpc_client):
         data = FAKE_SW_UPDATE_APPLY_DATA
@@ -169,7 +169,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         mock_rpc_client().apply_sw_update_strategy.assert_called_once()
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_scoped_post_sw_update_apply(self, mock_db_api, mock_rpc_client):
         data = FAKE_SW_UPDATE_APPLY_DATA
@@ -181,7 +181,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         mock_rpc_client().apply_sw_update_strategy.assert_called_once()
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_abort(self, mock_db_api, mock_rpc_client):
         mock_rpc_client().abort_sw_update_strategy.return_value = True
@@ -192,7 +192,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         mock_rpc_client().abort_sw_update_strategy.assert_called_once()
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_scoped_post_sw_update_abort(self, mock_db_api, mock_rpc_client):
         mock_rpc_client().abort_sw_update_strategy.return_value = True
@@ -204,7 +204,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         mock_rpc_client().abort_sw_update_strategy.assert_called_once()
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_post_sw_update_bad_action(self, mock_db_api, mock_rpc_client):
         data = copy.copy(FAKE_SW_UPDATE_APPLY_DATA)
@@ -213,7 +213,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
                               self.app.post_json, FAKE_URL,
                               headers=FAKE_HEADERS, params=data)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_delete_sw_update_strategy(self, mock_db_api, mock_rpc_client):
         delete_url = FAKE_URL
@@ -223,7 +223,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
             mock.ANY, update_type=None)
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_scoped_delete_sw_update_strategy(self,
                                               mock_db_api,
@@ -235,7 +235,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
             mock.ANY, update_type=consts.SW_UPDATE_TYPE_PATCH)
         self.assertEqual(response.status_int, 200)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_get_sw_update_strategy(self, mock_db_api, mock_rpc_client):
         get_url = FAKE_URL
@@ -243,7 +243,7 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         self.app.get(get_url, headers=FAKE_HEADERS)
         self.assertEqual(1, mock_db_api.sw_update_strategy_get.call_count)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_scoped_get_sw_update_strategy(self, mock_db_api, mock_rpc_client):
         get_url = FAKE_URL + '?type=' + consts.SW_UPDATE_TYPE_PATCH
@@ -251,14 +251,14 @@ class TestSwUpdateStrategy(testroot.DCManagerApiTest):
         self.app.get(get_url, headers=FAKE_HEADERS)
         self.assertEqual(1, mock_db_api.sw_update_strategy_get.call_count)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_get_sw_update_strategy_steps(self, mock_db_api, mock_rpc_client):
         get_url = FAKE_URL + '/steps'
         self.app.get(get_url, headers=FAKE_HEADERS)
         self.assertEqual(1, mock_db_api.strategy_step_get_all.call_count)

-    @mock.patch.object(rpc_client, 'ManagerClient')
+    @mock.patch.object(rpc_client, 'ManagerOrchestratorClient')
     @mock.patch.object(sw_update_strategy, 'db_api')
     def test_get_sw_update_strategy_single_step(
             self, mock_db_api, mock_rpc_client):
@@ -68,27 +68,21 @@ class TestDCManagerService(base.DCManagerTestCase):
         self.service_obj.init_tgm()
         self.assertIsNotNone(self.service_obj.TG)

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
-    def test_init_managers(self, mock_subcloud_manager,
-                           mock_sw_update_manager):
+    def test_init_managers(self, mock_subcloud_manager):
         self.service_obj.init_managers()
         self.assertIsNotNone(self.service_obj.subcloud_manager)
-        self.assertIsNotNone(self.service_obj.sw_update_manager)

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
     @mock.patch.object(service, 'rpc_messaging')
-    def test_start(self, mock_rpc, mock_subcloud_manager,
-                   mock_sw_update_manager):
+    def test_start(self, mock_rpc, mock_subcloud_manager):
         self.service_obj.start()
         mock_rpc.get_rpc_server.assert_called_once_with(
             self.service_obj.target, self.service_obj)
         mock_rpc.get_rpc_server().start.assert_called_once_with()

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
-    def test_add_subcloud(self, mock_subcloud_manager, mock_sw_update_manager):
+    def test_add_subcloud(self, mock_subcloud_manager):
         self.service_obj.init_tgm()
         self.service_obj.init_managers()
         self.service_obj.add_subcloud(
@@ -96,10 +90,8 @@ class TestDCManagerService(base.DCManagerTestCase):
         mock_subcloud_manager().add_subcloud.\
             assert_called_once_with(self.context, mock.ANY)

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
-    def test_delete_subcloud(self, mock_subcloud_manager,
-                             mock_sw_update_manager):
+    def test_delete_subcloud(self, mock_subcloud_manager):
         self.service_obj.init_tgm()
         self.service_obj.init_managers()
         self.service_obj.delete_subcloud(
@@ -107,10 +99,8 @@ class TestDCManagerService(base.DCManagerTestCase):
         mock_subcloud_manager().delete_subcloud.\
             assert_called_once_with(self.context, mock.ANY)

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
-    def test_update_subcloud(self, mock_subcloud_manager,
-                             mock_sw_update_manager):
+    def test_update_subcloud(self, mock_subcloud_manager):
         self.service_obj.init_tgm()
         self.service_obj.init_managers()
         self.service_obj.update_subcloud(
@@ -121,20 +111,16 @@ class TestDCManagerService(base.DCManagerTestCase):
             mock.ANY, mock.ANY,
             mock.ANY, mock.ANY)

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
     @mock.patch.object(service, 'rpc_messaging')
-    def test_stop_rpc_server(self, mock_rpc, mock_subcloud_manager,
-                             mock_sw_update_manager):
+    def test_stop_rpc_server(self, mock_rpc, mock_subcloud_manager):
         self.service_obj.start()
         self.service_obj._stop_rpc_server()
         mock_rpc.get_rpc_server().stop.assert_called_once_with()

-    @mock.patch.object(service, 'SwUpdateManager')
     @mock.patch.object(service, 'SubcloudManager')
     @mock.patch.object(service, 'rpc_messaging')
-    def test_stop(self, mock_rpc, mock_subcloud_manager,
-                  mock_sw_update_manager):
+    def test_stop(self, mock_rpc, mock_subcloud_manager):
         self.service_obj.start()
         self.service_obj.stop()
         mock_rpc.get_rpc_server().stop.assert_called_once_with()
@@ -8,10 +8,10 @@ import mock

 from dccommon.drivers.openstack import vim
 from dcmanager.common import consts
-from dcmanager.manager.states.firmware import applying_vim_strategy
+from dcmanager.orchestrator.states.firmware import applying_vim_strategy

-from dcmanager.tests.unit.manager.states.fakes import FakeVimStrategy
-from dcmanager.tests.unit.manager.states.firmware.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeVimStrategy
+from dcmanager.tests.unit.orchestrator.states.firmware.test_base \
     import TestFwUpdateState

 STRATEGY_READY_TO_APPLY = FakeVimStrategy(state=vim.STATE_READY_TO_APPLY)
@@ -20,11 +20,11 @@ STRATEGY_APPLIED = FakeVimStrategy(state=vim.STATE_APPLIED)
 STRATEGY_APPLY_FAILED = FakeVimStrategy(vim.STATE_APPLY_FAILED)


-@mock.patch("dcmanager.manager.states.firmware.applying_vim_strategy."
+@mock.patch("dcmanager.orchestrator.states.firmware.applying_vim_strategy."
             "DEFAULT_MAX_FAILED_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.firmware.applying_vim_strategy."
+@mock.patch("dcmanager.orchestrator.states.firmware.applying_vim_strategy."
             "DEFAULT_MAX_WAIT_ATTEMPTS", 3)
-@mock.patch("dcmanager.manager.states.firmware.applying_vim_strategy."
+@mock.patch("dcmanager.orchestrator.states.firmware.applying_vim_strategy."
             "WAIT_INTERVAL", 1)
 class TestFwUpdateApplyingVIMStrategyStage(TestFwUpdateState):

@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: Apache-2.0
 #
 from dcmanager.common import consts
-from dcmanager.tests.unit.manager.states.test_base import TestSwUpdate
+from dcmanager.tests.unit.orchestrator.states.test_base import TestSwUpdate


 class TestFwUpdateState(TestSwUpdate):
@@ -9,10 +9,10 @@ import mock

 from dccommon.drivers.openstack import vim
 from dcmanager.common import consts
-from dcmanager.manager.states.firmware import creating_vim_strategy
+from dcmanager.orchestrator.states.firmware import creating_vim_strategy

-from dcmanager.tests.unit.manager.states.fakes import FakeVimStrategy
-from dcmanager.tests.unit.manager.states.firmware.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeVimStrategy
+from dcmanager.tests.unit.orchestrator.states.firmware.test_base \
     import TestFwUpdateState

 STRATEGY_BUILDING = FakeVimStrategy(state=vim.STATE_BUILDING)
@@ -20,9 +20,9 @@ STRATEGY_DONE_BUILDING = FakeVimStrategy(state=vim.STATE_READY_TO_APPLY)
 STRATEGY_FAILED_BUILDING = FakeVimStrategy(vim.STATE_BUILD_FAILED)


-@mock.patch("dcmanager.manager.states.firmware.creating_vim_strategy."
+@mock.patch("dcmanager.orchestrator.states.firmware.creating_vim_strategy."
             "DEFAULT_MAX_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.firmware.creating_vim_strategy."
+@mock.patch("dcmanager.orchestrator.states.firmware.creating_vim_strategy."
             "DEFAULT_SLEEP_DURATION", 1)
 class TestFwUpdateCreatingVIMStrategyStage(TestFwUpdateState):

@@ -7,10 +7,10 @@ import mock

 from dccommon.drivers.openstack import vim
 from dcmanager.common import consts
-from dcmanager.manager.states.firmware.finishing_fw_update import FinishingFwUpdateState
+from dcmanager.orchestrator.states.firmware.finishing_fw_update import FinishingFwUpdateState

-from dcmanager.tests.unit.manager.states.fakes import FakeVimStrategy
-from dcmanager.tests.unit.manager.states.firmware.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeVimStrategy
+from dcmanager.tests.unit.orchestrator.states.firmware.test_base \
     import TestFwUpdateState

 STRATEGY_APPLIED = FakeVimStrategy(state=vim.STATE_APPLIED)
@@ -8,11 +8,11 @@ import uuid

 from dcmanager.common import consts

-from dcmanager.tests.unit.manager.states.fakes import FakeController
-from dcmanager.tests.unit.manager.states.fakes import FakeDevice
-from dcmanager.tests.unit.manager.states.fakes import FakeDeviceImage
-from dcmanager.tests.unit.manager.states.fakes import FakeDeviceLabel
-from dcmanager.tests.unit.manager.states.firmware.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeController
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeDevice
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeDeviceImage
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeDeviceLabel
+from dcmanager.tests.unit.orchestrator.states.firmware.test_base \
     import TestFwUpdateState

 VENDOR_1 = '1001'
@@ -23,20 +23,20 @@ from oslo_config import cfg

 from dcmanager.common import consts
 from dcmanager.common import context
-from dcmanager.manager import fw_update_orch_thread
-from dcmanager.manager import patch_orch_thread
-from dcmanager.manager.states.base import BaseState
-from dcmanager.manager import sw_update_manager
-from dcmanager.manager import sw_upgrade_orch_thread
+from dcmanager.orchestrator import fw_update_orch_thread
+from dcmanager.orchestrator import patch_orch_thread
+from dcmanager.orchestrator.states.base import BaseState
+from dcmanager.orchestrator import sw_update_manager
+from dcmanager.orchestrator import sw_upgrade_orch_thread

 from dcmanager.tests import base
-from dcmanager.tests.unit.manager.states.fakes import FakeKeystoneClient
-from dcmanager.tests.unit.manager.states.fakes import FakeSysinvClient
-from dcmanager.tests.unit.manager.states.fakes import FakeVimClient
-from dcmanager.tests.unit.manager.test_sw_update_manager import FakeOrchThread
-from dcmanager.tests.unit.manager.test_sw_update_manager \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeKeystoneClient
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeSysinvClient
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeVimClient
+from dcmanager.tests.unit.orchestrator.test_sw_update_manager import FakeOrchThread
+from dcmanager.tests.unit.orchestrator.test_sw_update_manager \
     import StrategyStep
-from dcmanager.tests.unit.manager.test_sw_update_manager \
+from dcmanager.tests.unit.orchestrator.test_sw_update_manager \
     import Subcloud
 from dcmanager.tests import utils

@@ -8,9 +8,9 @@ import mock

 from dcmanager.common import consts

-from dcmanager.manager.states.upgrade import activating
-from dcmanager.tests.unit.manager.states.fakes import FakeUpgrade
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.orchestrator.states.upgrade import activating
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeUpgrade
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 VALID_UPGRADE = FakeUpgrade(state='imported')
@@ -19,9 +19,9 @@ ACTIVATING_FAILED = FakeUpgrade(state='activation-failed')
 ALREADY_ACTIVATED_UPGRADE = FakeUpgrade(state='activation-complete')


-@mock.patch("dcmanager.manager.states.upgrade.activating.DEFAULT_MAX_QUERIES",
+@mock.patch("dcmanager.orchestrator.states.upgrade.activating.DEFAULT_MAX_QUERIES",
             5)
-@mock.patch("dcmanager.manager.states.upgrade.activating.DEFAULT_SLEEP_DURATION",
+@mock.patch("dcmanager.orchestrator.states.upgrade.activating.DEFAULT_SLEEP_DURATION",
             1)
 class TestSwUpgradeActivatingStage(TestSwUpgradeState):

@@ -4,7 +4,7 @@
 # SPDX-License-Identifier: Apache-2.0
 #
 from dcmanager.common import consts
-from dcmanager.tests.unit.manager.states.test_base import TestSwUpdate
+from dcmanager.tests.unit.orchestrator.states.test_base import TestSwUpdate


 class TestSwUpgradeState(TestSwUpdate):
@@ -6,11 +6,11 @@
 import mock

 from dcmanager.common import consts
-from dcmanager.manager.states.upgrade import completing
+from dcmanager.orchestrator.states.upgrade import completing

-from dcmanager.tests.unit.manager.states.fakes import FakeSystem
-from dcmanager.tests.unit.manager.states.fakes import FakeUpgrade
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeSystem
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeUpgrade
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 VALID_UPGRADE = FakeUpgrade(state='activation-complete')
@@ -18,9 +18,9 @@ INVALID_UPGRADE = FakeUpgrade(state='aborting')
 UPGRADE_COMPLETING = FakeUpgrade(state='completing')


-@mock.patch("dcmanager.manager.states.upgrade.completing.DEFAULT_MAX_QUERIES",
+@mock.patch("dcmanager.orchestrator.states.upgrade.completing.DEFAULT_MAX_QUERIES",
             3)
-@mock.patch("dcmanager.manager.states.upgrade.completing.DEFAULT_SLEEP_DURATION",
+@mock.patch("dcmanager.orchestrator.states.upgrade.completing.DEFAULT_SLEEP_DURATION",
             1)
 class TestSwUpgradeCompletingStage(TestSwUpgradeState):

@@ -8,14 +8,14 @@ import mock

 from dcmanager.common import consts
 from dcmanager.common.exceptions import VaultLoadMissingError
-from dcmanager.manager.states.upgrade import importing_load
+from dcmanager.orchestrator.states.upgrade import importing_load

-from dcmanager.tests.unit.manager.states.fakes import FakeLoad
-from dcmanager.tests.unit.manager.states.fakes import FakeSystem
-from dcmanager.tests.unit.manager.states.fakes import PREVIOUS_PREVIOUS_VERSION
-from dcmanager.tests.unit.manager.states.fakes import PREVIOUS_VERSION
-from dcmanager.tests.unit.manager.states.fakes import UPGRADED_VERSION
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeLoad
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeSystem
+from dcmanager.tests.unit.orchestrator.states.fakes import PREVIOUS_PREVIOUS_VERSION
+from dcmanager.tests.unit.orchestrator.states.fakes import PREVIOUS_VERSION
+from dcmanager.tests.unit.orchestrator.states.fakes import UPGRADED_VERSION
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState


@@ -90,9 +90,9 @@ SUCCESS_DELETE_RESPONSE = {
 }


-@mock.patch("dcmanager.manager.states.upgrade.importing_load."
+@mock.patch("dcmanager.orchestrator.states.upgrade.importing_load."
            "DEFAULT_MAX_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.upgrade.importing_load."
+@mock.patch("dcmanager.orchestrator.states.upgrade.importing_load."
             "DEFAULT_SLEEP_DURATION", 1)
 class TestSwUpgradeImportingLoadStage(TestSwUpgradeState):

@@ -7,7 +7,7 @@ import mock

 from dcmanager.common import consts

-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 MISSING_LICENSE_RESPONSE = {
@@ -7,10 +7,10 @@ import itertools
 import mock

 from dcmanager.common import consts
-from dcmanager.manager.states import lock_host
+from dcmanager.orchestrator.states import lock_host

-from dcmanager.tests.unit.manager.states.fakes import FakeController
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeController
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 CONTROLLER_0_UNLOCKED = FakeController(administrative=consts.ADMIN_UNLOCKED)
@@ -24,8 +24,8 @@ CONTROLLER_0_LOCKING_FAILED = \
                    task='Swacting')


-@mock.patch("dcmanager.manager.states.lock_host.DEFAULT_MAX_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.lock_host.DEFAULT_SLEEP_DURATION", 1)
+@mock.patch("dcmanager.orchestrator.states.lock_host.DEFAULT_MAX_QUERIES", 3)
+@mock.patch("dcmanager.orchestrator.states.lock_host.DEFAULT_SLEEP_DURATION", 1)
 class TestSwUpgradeLockControllerStage(TestSwUpgradeState):

     def setUp(self):
@@ -6,11 +6,11 @@
 import mock

 from dcmanager.common import consts
-from dcmanager.manager.states.upgrade import migrating_data
+from dcmanager.orchestrator.states.upgrade import migrating_data

-from dcmanager.tests.unit.manager.states.fakes import FakeController
-from dcmanager.tests.unit.manager.states.fakes import FakeSubcloud
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeController
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeSubcloud
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 CONTROLLER_0_LOCKED = FakeController(administrative=consts.ADMIN_LOCKED)
@@ -22,15 +22,15 @@ CONTROLLER_0_UNLOCKED = \
                  operational=consts.OPERATIONAL_ENABLED)


-@mock.patch("dcmanager.manager.states.upgrade.migrating_data."
+@mock.patch("dcmanager.orchestrator.states.upgrade.migrating_data."
             "DEFAULT_MAX_API_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.upgrade.migrating_data."
+@mock.patch("dcmanager.orchestrator.states.upgrade.migrating_data."
             "DEFAULT_MAX_FAILED_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.upgrade.migrating_data."
+@mock.patch("dcmanager.orchestrator.states.upgrade.migrating_data."
             "DEFAULT_API_SLEEP", 1)
-@mock.patch("dcmanager.manager.states.upgrade.migrating_data."
+@mock.patch("dcmanager.orchestrator.states.upgrade.migrating_data."
             "DEFAULT_FAILED_SLEEP", 1)
-@mock.patch("dcmanager.manager.states.upgrade.migrating_data."
+@mock.patch("dcmanager.orchestrator.states.upgrade.migrating_data."
             "DEFAULT_ANSIBLE_SLEEP", 3)
 class TestSwUpgradeMigratingDataStage(TestSwUpgradeState):
@@ -54,7 +54,7 @@ class TestSwUpgradeMigratingDataStage(TestSwUpgradeState):
         # Simulate a failed subprocess call to the platform upgrade playbook
         # on the subcloud.
         p = mock.patch(
-            'dcmanager.manager.states.upgrade.migrating_data.migrate_subcloud_data')
+            'dcmanager.orchestrator.states.upgrade.migrating_data.migrate_subcloud_data')
         self.mock_platform_upgrade_call = p.start()
         self.mock_platform_upgrade_call.side_effect = Exception("Bad day!")
         self.addCleanup(p.stop)
@@ -73,7 +73,7 @@ class TestSwUpgradeMigratingDataStage(TestSwUpgradeState):
         # Simulate a successful subprocess call to the platform upgrade playbook
         # on the subcloud.
         p = mock.patch(
-            'dcmanager.manager.states.upgrade.migrating_data.migrate_subcloud_data')
+            'dcmanager.orchestrator.states.upgrade.migrating_data.migrate_subcloud_data')
         self.mock_platform_upgrade_call = p.start()
         self.mock_platform_upgrade_call.return_value = 0
         self.addCleanup(p.stop)
@@ -167,7 +167,7 @@ class TestSwUpgradeMigratingDataStage(TestSwUpgradeState):
         # Simulate a successful subprocess call to the platform upgrade playbook
         # on the subcloud.
         p = mock.patch(
-            'dcmanager.manager.states.upgrade.migrating_data.migrate_subcloud_data')
+            'dcmanager.orchestrator.states.upgrade.migrating_data.migrate_subcloud_data')
         self.mock_platform_upgrade_call = p.start()
         self.mock_platform_upgrade_call.return_value = 0
         self.addCleanup(p.stop)
@@ -196,7 +196,7 @@ class TestSwUpgradeMigratingDataStage(TestSwUpgradeState):
         # Simulate a successful subprocess call to the platform upgrade playbook
         # on the subcloud.
         p = mock.patch(
-            'dcmanager.manager.states.upgrade.migrating_data.migrate_subcloud_data')
+            'dcmanager.orchestrator.states.upgrade.migrating_data.migrate_subcloud_data')
         self.mock_platform_upgrade_call = p.start()
         self.mock_platform_upgrade_call.return_value = 0
         self.addCleanup(p.stop)
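All of the hunks above use the same patch lifecycle: start() in setUp, with stop() registered via addCleanup so the patch is unwound even when a test fails. A small self-contained illustration; os.path.exists is used purely as a stand-in target for migrate_subcloud_data:

import os.path
import unittest
from unittest import mock


class ExamplePatchLifecycle(unittest.TestCase):
    def setUp(self):
        # Activate the patch for every test; addCleanup guarantees stop()
        # runs afterwards, mirroring the migrating-data tests above.
        p = mock.patch("os.path.exists")
        self.mock_exists = p.start()
        self.addCleanup(p.stop)

    def test_simulated_failure(self):
        # side_effect makes the patched call raise, like "Bad day!" above.
        self.mock_exists.side_effect = Exception("Bad day!")
        self.assertRaises(Exception, os.path.exists, "/some/path")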
@@ -7,9 +7,9 @@ import mock

 from dcmanager.common import consts

-from dcmanager.tests.unit.manager.states.fakes import FakeHostFilesystem
-from dcmanager.tests.unit.manager.states.fakes import FakeSubcloud
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeHostFilesystem
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeSubcloud
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 CONTROLLER_0_HOST_FS_SCRATCH_MIN_SIZED = FakeHostFilesystem(size=16)
@@ -7,10 +7,10 @@ import itertools
 import mock

 from dcmanager.common import consts
-from dcmanager.manager.states.upgrade import starting_upgrade
+from dcmanager.orchestrator.states.upgrade import starting_upgrade

-from dcmanager.tests.unit.manager.states.fakes import FakeUpgrade
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeUpgrade
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 UPGRADE_ABORTING = FakeUpgrade(state='aborting')
@@ -18,9 +18,9 @@ UPGRADE_STARTING = FakeUpgrade(state='starting')
 UPGRADE_STARTED = FakeUpgrade(state='started')


-@mock.patch("dcmanager.manager.states.upgrade.starting_upgrade"
+@mock.patch("dcmanager.orchestrator.states.upgrade.starting_upgrade"
             ".DEFAULT_MAX_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.upgrade.starting_upgrade"
+@mock.patch("dcmanager.orchestrator.states.upgrade.starting_upgrade"
             ".DEFAULT_SLEEP_DURATION", 1)
 class TestSwUpgradeStartingUpgradeStage(TestSwUpgradeState):
@@ -7,10 +7,10 @@ import itertools
 import mock

 from dcmanager.common import consts
-from dcmanager.manager.states import unlock_host
+from dcmanager.orchestrator.states import unlock_host

-from dcmanager.tests.unit.manager.states.fakes import FakeController
-from dcmanager.tests.unit.manager.states.upgrade.test_base \
+from dcmanager.tests.unit.orchestrator.states.fakes import FakeController
+from dcmanager.tests.unit.orchestrator.states.upgrade.test_base \
     import TestSwUpgradeState

 CONTROLLER_0_UNLOCKED = \
@@ -26,11 +26,11 @@ CONTROLLER_0_UNLOCKING_FAILED = \
                  task='Swacting')


-@mock.patch("dcmanager.manager.states.unlock_host.DEFAULT_MAX_API_QUERIES", 3)
-@mock.patch("dcmanager.manager.states.unlock_host.DEFAULT_MAX_FAILED_QUERIES",
+@mock.patch("dcmanager.orchestrator.states.unlock_host.DEFAULT_MAX_API_QUERIES", 3)
+@mock.patch("dcmanager.orchestrator.states.unlock_host.DEFAULT_MAX_FAILED_QUERIES",
             3)
-@mock.patch("dcmanager.manager.states.unlock_host.DEFAULT_API_SLEEP", 1)
-@mock.patch("dcmanager.manager.states.unlock_host.DEFAULT_FAILED_SLEEP", 1)
+@mock.patch("dcmanager.orchestrator.states.unlock_host.DEFAULT_API_SLEEP", 1)
+@mock.patch("dcmanager.orchestrator.states.unlock_host.DEFAULT_FAILED_SLEEP", 1)
 class TestSwUpgradeUnlockControllerStage(TestSwUpgradeState):

     def setUp(self):
@@ -28,8 +28,8 @@ from dcmanager.common import consts
 from dcmanager.common import context
 from dcmanager.common import exceptions
 from dcmanager.db.sqlalchemy import api as db_api
-from dcmanager.manager import patch_orch_thread
-from dcmanager.manager import sw_update_manager
+from dcmanager.orchestrator import patch_orch_thread
+from dcmanager.orchestrator import sw_update_manager
 from dcmanager.tests import base
 from dcmanager.tests import utils
 from dcorch.common import consts as dcorch_consts
322
distributedcloud/ocf/dcmanager-orchestrator
Normal file
@@ -0,0 +1,322 @@
#!/bin/sh
# OpenStack DC Manager Orchestrator Service (dcmanager-orchestrator)
#
# Description:
#   Manages an OpenStack DC Manager Orchestrator Service (dcmanager-orchestrator)
#   process as an HA resource
#
# Copyright (c) 2020 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
#
# See usage() function below for more details ...
#
# OCF instance parameters:
#   OCF_RESKEY_binary
#   OCF_RESKEY_config
#   OCF_RESKEY_user
#   OCF_RESKEY_pid
#   OCF_RESKEY_additional_parameters
#######################################################################
# Initialization:

: ${OCF_FUNCTIONS_DIR=${OCF_ROOT}/lib/heartbeat}
. ${OCF_FUNCTIONS_DIR}/ocf-shellfuncs

#######################################################################

# Fill in some defaults if no values are specified

OCF_RESKEY_binary_default="/usr/bin/dcmanager-orchestrator"
OCF_RESKEY_config_default="/etc/dcmanager/dcmanager.conf"
OCF_RESKEY_user_default="root"
OCF_RESKEY_pid_default="$HA_RSCTMP/$OCF_RESOURCE_INSTANCE.pid"

: ${OCF_RESKEY_binary=${OCF_RESKEY_binary_default}}
: ${OCF_RESKEY_config=${OCF_RESKEY_config_default}}
: ${OCF_RESKEY_user=${OCF_RESKEY_user_default}}
: ${OCF_RESKEY_pid=${OCF_RESKEY_pid_default}}

#######################################################################
usage() {
    cat <<UEND
usage: $0 (start|stop|validate-all|meta-data|status|monitor)

$0 manages an OpenStack DC Manager Orchestrator service (dcmanager-orchestrator) process as an HA resource

The 'start' operation starts the dcmanager-orchestrator service.
The 'stop' operation stops the dcmanager-orchestrator service.
The 'validate-all' operation reports whether the parameters are valid.
The 'meta-data' operation reports this RA's meta-data information.
The 'status' operation reports whether the dcmanager-orchestrator service is running.
The 'monitor' operation reports whether the dcmanager-orchestrator service seems to be working.

UEND
}
meta_data() {
    cat <<END
<?xml version="1.0"?>
<!DOCTYPE resource-agent SYSTEM "ra-api-1.dtd">
<resource-agent name="dcmanager-orchestrator">
<version>1.0</version>

<longdesc lang="en">
Resource agent for the DC Manager Orchestrator Service (dcmanager-orchestrator)
</longdesc>
<shortdesc lang="en">Manages the OpenStack DC Manager Orchestrator Service (dcmanager-orchestrator)</shortdesc>
<parameters>

<parameter name="binary" unique="0" required="0">
<longdesc lang="en">
Location of the DC Manager Orchestrator Service binary (dcmanager-orchestrator)
</longdesc>
<shortdesc lang="en">DC Manager Orchestrator Service binary (dcmanager-orchestrator)</shortdesc>
<content type="string" default="${OCF_RESKEY_binary_default}" />
</parameter>

<parameter name="config" unique="0" required="0">
<longdesc lang="en">
Location of the DC Manager Orchestrator Service (dcmanager-orchestrator) configuration file
</longdesc>
<shortdesc lang="en">DC Manager Orchestrator Service (dcmanager-orchestrator) config file</shortdesc>
<content type="string" default="${OCF_RESKEY_config_default}" />
</parameter>

<parameter name="user" unique="0" required="0">
<longdesc lang="en">
User running the DC Manager Orchestrator Service (dcmanager-orchestrator)
</longdesc>
<shortdesc lang="en">DC Manager Orchestrator Service (dcmanager-orchestrator) user</shortdesc>
<content type="string" default="${OCF_RESKEY_user_default}" />
</parameter>

<parameter name="pid" unique="0" required="0">
<longdesc lang="en">
The pid file to use for this DC Manager Orchestrator Service (dcmanager-orchestrator) instance
</longdesc>
<shortdesc lang="en">DC Manager Orchestrator Service (dcmanager-orchestrator) pid file</shortdesc>
<content type="string" default="${OCF_RESKEY_pid_default}" />
</parameter>

<parameter name="additional_parameters" unique="0" required="0">
<longdesc lang="en">
Additional parameters to pass on to dcmanager-orchestrator
</longdesc>
<shortdesc lang="en">Additional parameters for dcmanager-orchestrator</shortdesc>
<content type="string" />
</parameter>

</parameters>

<actions>
<action name="start" timeout="20" />
<action name="stop" timeout="20" />
<action name="status" timeout="20" />
<action name="monitor" timeout="10" interval="5" />
<action name="validate-all" timeout="5" />
<action name="meta-data" timeout="5" />
</actions>
</resource-agent>
END
}
#######################################################################
# Functions invoked by resource manager actions

dcmanager_orchestrator_validate() {
    local rc

    check_binary $OCF_RESKEY_binary
    check_binary curl
    check_binary tr
    check_binary grep
    check_binary cut
    check_binary head

    # A config file on shared storage that is not available
    # during probes is OK.
    if [ ! -f $OCF_RESKEY_config ]; then
        if ! ocf_is_probe; then
            ocf_log err "Config $OCF_RESKEY_config doesn't exist"
            return $OCF_ERR_INSTALLED
        fi
        ocf_log warn "Config $OCF_RESKEY_config not available during a probe"
    fi

    getent passwd $OCF_RESKEY_user >/dev/null 2>&1
    rc=$?
    if [ $rc -ne 0 ]; then
        ocf_log err "User $OCF_RESKEY_user doesn't exist"
        return $OCF_ERR_INSTALLED
    fi

    true
}
dcmanager_orchestrator_status() {
    local pid
    local rc

    if [ ! -f $OCF_RESKEY_pid ]; then
        ocf_log info "DC Manager Orchestrator Service (dcmanager-orchestrator) is not running"
        return $OCF_NOT_RUNNING
    else
        pid=`cat $OCF_RESKEY_pid`
    fi

    ocf_run -warn kill -s 0 $pid
    rc=$?
    if [ $rc -eq 0 ]; then
        return $OCF_SUCCESS
    else
        ocf_log info "Old PID file found, but DC Manager Orchestrator Service (dcmanager-orchestrator) is not running"
        rm -f $OCF_RESKEY_pid
        return $OCF_NOT_RUNNING
    fi
}

dcmanager_orchestrator_monitor() {
    local rc

    dcmanager_orchestrator_status
    rc=$?

    # If status returned anything but success, return that immediately
    if [ $rc -ne $OCF_SUCCESS ]; then
        return $rc
    fi

    ocf_log debug "DC Manager Orchestrator Service (dcmanager-orchestrator) monitor succeeded"
    return $OCF_SUCCESS
}
dcmanager_orchestrator_start() {
    local rc

    dcmanager_orchestrator_status
    rc=$?
    if [ $rc -eq $OCF_SUCCESS ]; then
        ocf_log info "DC Manager Orchestrator Service (dcmanager-orchestrator) already running"
        return $OCF_SUCCESS
    fi

    # Change the working dir to /, to be sure it's accessible
    cd /

    # Run the actual dcmanager-orchestrator daemon. Don't use ocf_run as we're
    # sending the tool's output straight to /dev/null anyway and using ocf_run
    # would break stdout-redirection here.
    su ${OCF_RESKEY_user} -s /bin/sh -c "${OCF_RESKEY_binary} --config-file=$OCF_RESKEY_config \
        $OCF_RESKEY_additional_parameters"' >> /dev/null 2>&1 & echo $!' > $OCF_RESKEY_pid

    # Spin waiting for the server to come up.
    # Let the CRM/LRM time us out if required
    while true; do
        dcmanager_orchestrator_monitor
        rc=$?
        [ $rc -eq $OCF_SUCCESS ] && break
        if [ $rc -ne $OCF_NOT_RUNNING ]; then
            ocf_log err "DC Manager Orchestrator Service (dcmanager-orchestrator) start failed"
            exit $OCF_ERR_GENERIC
        fi
        sleep 1
    done

    ocf_log info "DC Manager Orchestrator Service (dcmanager-orchestrator) started"
    return $OCF_SUCCESS
}
dcmanager_orchestrator_confirm_stop() {
    local my_binary
    local my_processes

    my_binary=`which ${OCF_RESKEY_binary}`
    my_processes=`pgrep -l -f "^(python|/usr/bin/python|/usr/bin/python2) ${my_binary}([^\w-]|$)"`

    if [ -n "${my_processes}" ]
    then
        ocf_log info "About to SIGKILL the following: ${my_processes}"
        pkill -KILL -f "^(python|/usr/bin/python|/usr/bin/python2) ${my_binary}([^\w-]|$)"
    fi
}
dcmanager_orchestrator_stop() {
    local rc
    local pid

    dcmanager_orchestrator_status
    rc=$?
    if [ $rc -eq $OCF_NOT_RUNNING ]; then
        ocf_log info "DC Manager Orchestrator Service (dcmanager-orchestrator) already stopped"
        dcmanager_orchestrator_confirm_stop
        return $OCF_SUCCESS
    fi

    # Try SIGTERM
    pid=`cat $OCF_RESKEY_pid`
    ocf_run kill -s TERM $pid
    rc=$?
    if [ $rc -ne 0 ]; then
        ocf_log err "DC Manager Orchestrator Service (dcmanager-orchestrator) couldn't be stopped"
        dcmanager_orchestrator_confirm_stop
        exit $OCF_ERR_GENERIC
    fi

    # Wait for the service to stop, bounding the wait by the CRM meta
    # timeout when one is supplied
    shutdown_timeout=15
    if [ -n "$OCF_RESKEY_CRM_meta_timeout" ]; then
        shutdown_timeout=$((($OCF_RESKEY_CRM_meta_timeout/1000)-5))
    fi
    count=0
    while [ $count -lt $shutdown_timeout ]; do
        dcmanager_orchestrator_status
        rc=$?
        if [ $rc -eq $OCF_NOT_RUNNING ]; then
            break
        fi
        count=`expr $count + 1`
        sleep 1
        ocf_log debug "DC Manager Orchestrator Service (dcmanager-orchestrator) still hasn't stopped yet. Waiting ..."
    done

    dcmanager_orchestrator_status
    rc=$?
    if [ $rc -ne $OCF_NOT_RUNNING ]; then
        # SIGTERM didn't help either, try SIGKILL
        ocf_log info "DC Manager Orchestrator Service (dcmanager-orchestrator) failed to stop after ${shutdown_timeout}s \
            using SIGTERM. Trying SIGKILL ..."
        ocf_run kill -s KILL $pid
    fi
    dcmanager_orchestrator_confirm_stop

    ocf_log info "DC Manager Orchestrator Service (dcmanager-orchestrator) stopped"

    rm -f $OCF_RESKEY_pid

    return $OCF_SUCCESS
}
#######################################################################

case "$1" in
    meta-data)   meta_data
                 exit $OCF_SUCCESS;;
    usage|help)  usage
                 exit $OCF_SUCCESS;;
esac

# Anything except meta-data and help must pass validation
dcmanager_orchestrator_validate || exit $?

# What kind of method was invoked?
case "$1" in
    start)        dcmanager_orchestrator_start;;
    stop)         dcmanager_orchestrator_stop;;
    status)       dcmanager_orchestrator_status;;
    monitor)      dcmanager_orchestrator_monitor;;
    validate-all) ;;
    *)            usage
                  exit $OCF_ERR_UNIMPLEMENTED;;
esac
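For orientation, the agent's pid-file probe and its escalating stop reduce to the following logic, sketched here in Python; the path and grace period are illustrative, not taken from the agent:

import os
import signal
import time

def is_running(pid_file="/tmp/dcmanager-orchestrator.pid"):  # illustrative path
    # Mirrors dcmanager_orchestrator_status: no pid file means not running;
    # otherwise probe the pid with signal 0 (an existence check only).
    try:
        with open(pid_file) as f:
            pid = int(f.read().strip())
    except (IOError, ValueError):
        return None
    try:
        os.kill(pid, 0)
        return pid
    except OSError:
        return None  # stale pid file; the agent removes it at this point

def stop(pid, grace_seconds=15):
    # Mirrors dcmanager_orchestrator_stop: SIGTERM first, poll for exit
    # within the grace period, then escalate to SIGKILL.
    os.kill(pid, signal.SIGTERM)
    for _ in range(grace_seconds):
        try:
            os.kill(pid, 0)
        except OSError:
            return True   # exited within the grace period
        time.sleep(1)
    os.kill(pid, signal.SIGKILL)
    return False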
@@ -30,6 +30,7 @@ packages =
 console_scripts =
     dcmanager-api = dcmanager.cmd.api:main
     dcmanager-audit = dcmanager.cmd.audit:main
+    dcmanager-orchestrator = dcmanager.cmd.orchestrator:main
     dcmanager-manager = dcmanager.cmd.manager:main
     dcmanager-manage = dcmanager.cmd.manage:main
     dcorch-api = dcorch.cmd.api:main
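The new console_scripts entry maps the dcmanager-orchestrator binary (launched by the systemd unit and the OCF agent above) to dcmanager.cmd.orchestrator:main. That module is not part of this hunk; as a rough sketch only, an oslo-based entry point of this kind typically takes the following shape, with the service wiring omitted because it is project-specific:

# dcmanager/cmd/orchestrator.py -- hypothetical shape, for illustration only
import sys

from oslo_config import cfg
from oslo_log import log as logging

CONF = cfg.CONF


def main():
    logging.register_options(CONF)
    CONF(sys.argv[1:], project='dcmanager')
    logging.setup(CONF, 'dcmanager-orchestrator')
    # ...create and launch the orchestrator service here; the real wiring
    # lives in the dcmanager tree and is not shown in this commit.


if __name__ == '__main__':
    main()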