From 4d23d8aab55842c915f3982862136efb1a1b8569 Mon Sep 17 00:00:00 2001 From: gtema <artem.goncharov@gmail.com> Date: Mon, 4 Mar 2024 16:25:02 +0100 Subject: [PATCH] Initial import Depends-On: https://review.opendev.org/c/openstack/governance/+/910581 Change-Id: I87866224d9ac89d3da150c95a86a20afb48dc4d2 --- .gitignore | 157 ++ .pre-commit-config.yaml | 40 + .stestr.conf | 3 + README.rst | 27 + bindep.txt | 12 + codegenerator/__init__.py | 0 codegenerator/ansible.py | 101 + codegenerator/base.py | 78 + codegenerator/cli.py | 344 +++ codegenerator/common/__init__.py | 443 +++ codegenerator/common/rust.py | 1135 ++++++++ codegenerator/common/schema.py | 154 ++ codegenerator/jsonschema.py | 85 + codegenerator/metadata.py | 740 +++++ codegenerator/model.py | 666 +++++ codegenerator/openapi/base.py | 1237 +++++++++ codegenerator/openapi/cinder.py | 346 +++ codegenerator/openapi/cinder_schemas.py | 609 +++++ codegenerator/openapi/glance.py | 703 +++++ codegenerator/openapi/keystone.py | 483 ++++ .../openapi/keystone_schemas/__init__.py | 11 + .../application_credential.py | 183 ++ .../openapi/keystone_schemas/auth.py | 634 +++++ .../openapi/keystone_schemas/common.py | 43 + .../openapi/keystone_schemas/domain.py | 165 ++ .../openapi/keystone_schemas/endpoint.py | 159 ++ .../openapi/keystone_schemas/federation.py | 447 ++++ .../openapi/keystone_schemas/group.py | 124 + .../openapi/keystone_schemas/project.py | 157 ++ .../openapi/keystone_schemas/region.py | 109 + .../openapi/keystone_schemas/role.py | 436 +++ .../openapi/keystone_schemas/service.py | 117 + .../openapi/keystone_schemas/user.py | 257 ++ codegenerator/openapi/neutron.py | 1174 ++++++++ codegenerator/openapi/neutron_schemas.py | 148 + codegenerator/openapi/nova.py | 662 +++++ codegenerator/openapi/nova_schemas.py | 2378 +++++++++++++++++ codegenerator/openapi/octavia.py | 392 +++ codegenerator/openapi/placement.py | 108 + codegenerator/openapi/utils.py | 533 ++++ codegenerator/openapi_spec.py | 90 + 
codegenerator/osc.py | 220 ++ codegenerator/rust_cli.py | 1374 ++++++++++ codegenerator/rust_sdk.py | 572 ++++ .../templates/ansible/impl_mod.py.j2 | 136 + .../templates/ansible/impl_mod_info.py.j2 | 92 + .../templates/ansible/test_playbook.yaml.j2 | 55 + codegenerator/templates/osc/impl_common.py.j2 | 37 + codegenerator/templates/osc/impl_create.py.j2 | 114 + codegenerator/templates/osc/impl_delete.py.j2 | 66 + codegenerator/templates/osc/impl_list.py.j2 | 130 + codegenerator/templates/osc/impl_set.py.j2 | 119 + codegenerator/templates/osc/impl_show.py.j2 | 66 + codegenerator/templates/osc/impl_unset.py.j2 | 100 + .../templates/osc/test_unit_create.py.j2 | 130 + .../templates/osc/test_unit_delete.py.j2 | 136 + .../templates/osc/test_unit_list.py.j2 | 132 + .../templates/osc/test_unit_set.py.j2 | 134 + .../templates/osc/test_unit_show.py.j2 | 93 + .../templates/osc/test_unit_unset.py.j2 | 142 + codegenerator/templates/rust_cli/impl.rs.j2 | 257 ++ .../templates/rust_cli/impl_image_download.j2 | 28 + .../templates/rust_cli/invoke_create.j2 | 3 + .../templates/rust_cli/invoke_download.j2 | 15 + .../templates/rust_cli/invoke_list.j2 | 15 + .../templates/rust_cli/invoke_patch.j2 | 70 + .../templates/rust_cli/invoke_upload.j2 | 5 + .../templates/rust_cli/parameters.j2 | 26 + .../templates/rust_cli/path_parameters.j2 | 12 + .../templates/rust_cli/query_parameters.j2 | 12 + .../templates/rust_cli/response_struct.j2 | 171 ++ .../templates/rust_cli/set_body_parameters.j2 | 70 + .../templates/rust_cli/set_path_parameters.j2 | 18 + .../rust_cli/set_query_parameters.j2 | 23 + codegenerator/templates/rust_macros.j2 | 353 +++ codegenerator/templates/rust_sdk/find.rs.j2 | 146 + codegenerator/templates/rust_sdk/impl.rs.j2 | 388 +++ codegenerator/templates/rust_sdk/mod.rs.j2 | 30 + .../templates/rust_sdk/request_struct.j2 | 57 + codegenerator/templates/rust_sdk/subtypes.j2 | 70 + codegenerator/templates/sdk/impl_schema.py.j2 | 24 + codegenerator/tests/functional/__init__.py | 0 
.../tests/functional/test_openapi_cinder.py | 37 + .../tests/functional/test_openapi_glance.py | 35 + .../tests/functional/test_openapi_keystone.py | 37 + .../tests/functional/test_openapi_neutron.py | 35 + .../tests/functional/test_openapi_nova.py | 35 + .../tests/functional/test_openapi_octavia.py | 37 + codegenerator/tests/unit/__init__.py | 0 codegenerator/tests/unit/test_common.py | 152 ++ codegenerator/tests/unit/test_model.py | 1330 +++++++++ codegenerator/tests/unit/test_rust_cli.py | 88 + codegenerator/tests/unit/test_rust_sdk.py | 210 ++ codegenerator/types.py | 84 + doc/requirements.txt | 4 + doc/source/ansible.rst | 10 + doc/source/conf.py | 97 + doc/source/index.rst | 21 + doc/source/metadata.rst | 15 + doc/source/openapi.rst | 186 ++ doc/source/osc.rst | 10 + doc/source/rust_cli.rst | 22 + doc/source/rust_sdk.rst | 13 + releasenotes/notes/.placeholder | 0 releasenotes/source/_static/.placeholder | 0 releasenotes/source/_templates/.placeholder | 0 releasenotes/source/conf.py | 302 +++ releasenotes/source/index.rst | 16 + releasenotes/source/unreleased.rst | 5 + requirements.txt | 16 + setup.cfg | 55 + setup.py | 17 + test-requirements.txt | 2 + tools/generate_openapi_specs.sh | 30 + tools/generate_rust.sh | 13 + tools/generate_rust_block_storage.sh | 20 + tools/generate_rust_compute.sh | 23 + tools/generate_rust_identity.sh | 28 + tools/generate_rust_image.sh | 18 + tools/generate_rust_network.sh | 27 + tools/generate_rust_swift.sh | 47 + tox.ini | 79 + zuul.d/project.yaml | 8 + 123 files changed, 24495 insertions(+) create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .stestr.conf create mode 100644 README.rst create mode 100644 bindep.txt create mode 100644 codegenerator/__init__.py create mode 100644 codegenerator/ansible.py create mode 100644 codegenerator/base.py create mode 100644 codegenerator/cli.py create mode 100644 codegenerator/common/__init__.py create mode 100644 codegenerator/common/rust.py create 
mode 100644 codegenerator/common/schema.py create mode 100644 codegenerator/jsonschema.py create mode 100644 codegenerator/metadata.py create mode 100644 codegenerator/model.py create mode 100644 codegenerator/openapi/base.py create mode 100644 codegenerator/openapi/cinder.py create mode 100644 codegenerator/openapi/cinder_schemas.py create mode 100644 codegenerator/openapi/glance.py create mode 100644 codegenerator/openapi/keystone.py create mode 100644 codegenerator/openapi/keystone_schemas/__init__.py create mode 100644 codegenerator/openapi/keystone_schemas/application_credential.py create mode 100644 codegenerator/openapi/keystone_schemas/auth.py create mode 100644 codegenerator/openapi/keystone_schemas/common.py create mode 100644 codegenerator/openapi/keystone_schemas/domain.py create mode 100644 codegenerator/openapi/keystone_schemas/endpoint.py create mode 100644 codegenerator/openapi/keystone_schemas/federation.py create mode 100644 codegenerator/openapi/keystone_schemas/group.py create mode 100644 codegenerator/openapi/keystone_schemas/project.py create mode 100644 codegenerator/openapi/keystone_schemas/region.py create mode 100644 codegenerator/openapi/keystone_schemas/role.py create mode 100644 codegenerator/openapi/keystone_schemas/service.py create mode 100644 codegenerator/openapi/keystone_schemas/user.py create mode 100644 codegenerator/openapi/neutron.py create mode 100644 codegenerator/openapi/neutron_schemas.py create mode 100644 codegenerator/openapi/nova.py create mode 100644 codegenerator/openapi/nova_schemas.py create mode 100644 codegenerator/openapi/octavia.py create mode 100644 codegenerator/openapi/placement.py create mode 100644 codegenerator/openapi/utils.py create mode 100644 codegenerator/openapi_spec.py create mode 100644 codegenerator/osc.py create mode 100644 codegenerator/rust_cli.py create mode 100644 codegenerator/rust_sdk.py create mode 100644 codegenerator/templates/ansible/impl_mod.py.j2 create mode 100644 
codegenerator/templates/ansible/impl_mod_info.py.j2 create mode 100644 codegenerator/templates/ansible/test_playbook.yaml.j2 create mode 100644 codegenerator/templates/osc/impl_common.py.j2 create mode 100644 codegenerator/templates/osc/impl_create.py.j2 create mode 100644 codegenerator/templates/osc/impl_delete.py.j2 create mode 100644 codegenerator/templates/osc/impl_list.py.j2 create mode 100644 codegenerator/templates/osc/impl_set.py.j2 create mode 100644 codegenerator/templates/osc/impl_show.py.j2 create mode 100644 codegenerator/templates/osc/impl_unset.py.j2 create mode 100644 codegenerator/templates/osc/test_unit_create.py.j2 create mode 100644 codegenerator/templates/osc/test_unit_delete.py.j2 create mode 100644 codegenerator/templates/osc/test_unit_list.py.j2 create mode 100644 codegenerator/templates/osc/test_unit_set.py.j2 create mode 100644 codegenerator/templates/osc/test_unit_show.py.j2 create mode 100644 codegenerator/templates/osc/test_unit_unset.py.j2 create mode 100644 codegenerator/templates/rust_cli/impl.rs.j2 create mode 100644 codegenerator/templates/rust_cli/impl_image_download.j2 create mode 100644 codegenerator/templates/rust_cli/invoke_create.j2 create mode 100644 codegenerator/templates/rust_cli/invoke_download.j2 create mode 100644 codegenerator/templates/rust_cli/invoke_list.j2 create mode 100644 codegenerator/templates/rust_cli/invoke_patch.j2 create mode 100644 codegenerator/templates/rust_cli/invoke_upload.j2 create mode 100644 codegenerator/templates/rust_cli/parameters.j2 create mode 100644 codegenerator/templates/rust_cli/path_parameters.j2 create mode 100644 codegenerator/templates/rust_cli/query_parameters.j2 create mode 100644 codegenerator/templates/rust_cli/response_struct.j2 create mode 100644 codegenerator/templates/rust_cli/set_body_parameters.j2 create mode 100644 codegenerator/templates/rust_cli/set_path_parameters.j2 create mode 100644 codegenerator/templates/rust_cli/set_query_parameters.j2 create mode 100644 
codegenerator/templates/rust_macros.j2 create mode 100644 codegenerator/templates/rust_sdk/find.rs.j2 create mode 100644 codegenerator/templates/rust_sdk/impl.rs.j2 create mode 100644 codegenerator/templates/rust_sdk/mod.rs.j2 create mode 100644 codegenerator/templates/rust_sdk/request_struct.j2 create mode 100644 codegenerator/templates/rust_sdk/subtypes.j2 create mode 100644 codegenerator/templates/sdk/impl_schema.py.j2 create mode 100644 codegenerator/tests/functional/__init__.py create mode 100644 codegenerator/tests/functional/test_openapi_cinder.py create mode 100644 codegenerator/tests/functional/test_openapi_glance.py create mode 100644 codegenerator/tests/functional/test_openapi_keystone.py create mode 100644 codegenerator/tests/functional/test_openapi_neutron.py create mode 100644 codegenerator/tests/functional/test_openapi_nova.py create mode 100644 codegenerator/tests/functional/test_openapi_octavia.py create mode 100644 codegenerator/tests/unit/__init__.py create mode 100644 codegenerator/tests/unit/test_common.py create mode 100644 codegenerator/tests/unit/test_model.py create mode 100644 codegenerator/tests/unit/test_rust_cli.py create mode 100644 codegenerator/tests/unit/test_rust_sdk.py create mode 100644 codegenerator/types.py create mode 100644 doc/requirements.txt create mode 100644 doc/source/ansible.rst create mode 100644 doc/source/conf.py create mode 100644 doc/source/index.rst create mode 100644 doc/source/metadata.rst create mode 100644 doc/source/openapi.rst create mode 100644 doc/source/osc.rst create mode 100644 doc/source/rust_cli.rst create mode 100644 doc/source/rust_sdk.rst create mode 100644 releasenotes/notes/.placeholder create mode 100644 releasenotes/source/_static/.placeholder create mode 100644 releasenotes/source/_templates/.placeholder create mode 100644 releasenotes/source/conf.py create mode 100644 releasenotes/source/index.rst create mode 100644 releasenotes/source/unreleased.rst create mode 100644 requirements.txt 
create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 test-requirements.txt create mode 100755 tools/generate_openapi_specs.sh create mode 100755 tools/generate_rust.sh create mode 100755 tools/generate_rust_block_storage.sh create mode 100755 tools/generate_rust_compute.sh create mode 100755 tools/generate_rust_identity.sh create mode 100755 tools/generate_rust_image.sh create mode 100755 tools/generate_rust_network.sh create mode 100755 tools/generate_rust_swift.sh create mode 100644 tox.ini create mode 100644 zuul.d/project.yaml diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2e8f710 --- /dev/null +++ b/.gitignore @@ -0,0 +1,157 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +.stestr/ + +wrk/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..da7e215 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,40 @@ +--- +default_language_version: + # force all unspecified python hooks to run python3 + python: python3 +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: mixed-line-ending + args: ['--fix', 'lf'] + exclude: '.*\.(svg)$' + - id: check-byte-order-marker + - id: check-executables-have-shebangs + - id: check-merge-conflict + - id: debug-statements + - id: check-yaml + files: .*\.(yaml|yml)$ + exclude: '^zuul.d/.*$' + - repo: https://github.com/PyCQA/doc8 + rev: v1.1.1 + hooks: + - id: doc8 + - repo: https://github.com/psf/black + rev: 24.2.0 + hooks: + - id: black + args: ['-l', '79'] + exclude: '^codegenerator/templates/.*$' + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.4.1 + hooks: + - id: mypy + language: python + types: [python] + args: ["codegenerator"] + pass_filenames: false + additional_dependencies: + - types-decorator + - types-PyYAML diff --git a/.stestr.conf b/.stestr.conf new file mode 100644 index 0000000..5648ced --- /dev/null +++ b/.stestr.conf @@ -0,0 +1,3 @@ +[DEFAULT] +test_path=./codegenerator/tests/unit +top_dir=./ diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..28ba9dd --- /dev/null +++ b/README.rst @@ -0,0 +1,27 @@ 
+======================= +OpenStack CodeGenerator +======================= + +Primary goal of the project is to simplify maintainers life by generating +complete or at least parts of the code. + +OpenApi Schema +============== + +CodeGenerator is able to generate OpenAPI specs for certain services by +inspecting their code. This requires service package being installed in the +environment where the generator is running. It then tries to initialize service +application and for supported runtimes scans for the exposed operations. At the +moment following services are covered: + +- Nova + +- Neutron + +- Cinder + +- Glance + +- Keystone + +- Octavia diff --git a/bindep.txt b/bindep.txt new file mode 100644 index 0000000..1e462d1 --- /dev/null +++ b/bindep.txt @@ -0,0 +1,12 @@ +# This is a cross-platform list tracking distribution packages needed for +# install and tests; +# see https://docs.openstack.org/infra/bindep/ for additional information. + +build-essential [platform:dpkg test] +gcc [platform:rpm test] + +# Cinder/Keystone/... require ssl and crypt to be present +cryptsetup [platform:rpm] +cryptsetup-bin [platform:dpkg] +libssl-dev [platform:dpkg] +openssl-devel [platform:rpm] diff --git a/codegenerator/__init__.py b/codegenerator/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/codegenerator/ansible.py b/codegenerator/ansible.py new file mode 100644 index 0000000..7efe8f5 --- /dev/null +++ b/codegenerator/ansible.py @@ -0,0 +1,101 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +# + +import logging +from pathlib import Path + +from codegenerator.base import BaseGenerator + +from openstack.test.fakes import generate_fake_resource + + +class AnsibleGenerator(BaseGenerator): + def __init__(self): + super().__init__() + + def _render_command( + self, + context: dict, + osc_path: list, + impl_template: str, + impl_dest: Path, + test_template: str, + test_dest: Path, + ): + """Render command code""" + self._render(impl_template, context, impl_dest.parent, impl_dest.name) + + unittest_path = test_dest.parent + + unittest_path.mkdir(parents=True, exist_ok=True) + Path(unittest_path, "__init__.py").touch() + + self._render(test_template, context, test_dest.parent, test_dest.name) + + def generate(self, res, target_dir, args=None): + """Generate code for the Ansible""" + logging.debug("Generating Ansible code in %s" % target_dir) + ansible_path = ["plugins", "modules"] + + context = dict( + res=res.resource_class, + sdk_mod_name=res.mod_name, + class_name=res.class_name, + resource_name=res.class_name.lower(), + sdk_service_name=res.service_name, + proxy=res.proxy_obj, + fqcn=res.fqcn, + registry_name=res.registry_name, + attrs=res.attrs, + target_name=res.class_name.lower(), + ) + if args and args.alternative_target_name: + context["target_name"] = args.alternative_target_name + context["ansible_module_name"] = "".join( + [x.capitalize() for x in context["target_name"].split("_")] + ) + + work_dir = Path(target_dir, "ansible") + + # Generate fake resource to use in examples and tests + fake_resource = generate_fake_resource(res.resource_class) + context["fake_resource"] = fake_resource + + # Generate info module + self._render( + "ansible/impl_mod_info.py.j2", + context, + Path(work_dir, "/".join(ansible_path)), + f"{context['target_name']}_info.py", + ) + # Generate module + self._render( + "ansible/impl_mod.py.j2", + context, + Path(work_dir, 
"/".join(ansible_path)), + f"{context['target_name']}.py", + ) + # Generate ansible test role + tests_dir = Path(work_dir, "ci/roles/", context["target_name"]) + self._render( + "ansible/test_playbook.yaml.j2", + context, + Path(tests_dir, "ci/roles/", context["target_name"], "tasks"), + "main.yaml", + ) + # Format rendered code to have less flake complains. This will still + # not guarantee code is fitting perfect, since there might be too long + # lines + self._format_code( + Path(work_dir, "/".join(ansible_path)), + ) diff --git a/codegenerator/base.py b/codegenerator/base.py new file mode 100644 index 0000000..cc3016d --- /dev/null +++ b/codegenerator/base.py @@ -0,0 +1,78 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +import abc +import logging +from pathlib import Path +import subprocess +import mdformat as md + +from jinja2 import Environment +from jinja2 import FileSystemLoader +from jinja2 import select_autoescape +from jinja2 import StrictUndefined + + +def wrap_markdown(input: str, width: int = 79) -> str: + """Apply mardownify to wrap the markdown""" + return md.text(input, options={"wrap": width}) + + +class BaseGenerator: + def __init__(self): + # Lower debug level of mdformat + logging.getLogger("markdown_it").setLevel(logging.INFO) + + self.env = Environment( + loader=FileSystemLoader("codegenerator/templates"), + autoescape=select_autoescape(), + undefined=StrictUndefined, + ) + self.env.filters["wrap_markdown"] = wrap_markdown + + def get_parser(self, parser): + return parser + + def _render(self, template, context, dest, fname): + """Render single template""" + template = self.env.get_template(template) + content = template.render(**context) + dest.mkdir(parents=True, exist_ok=True) + with open(Path(dest, fname), "w") as fp: + logging.debug("Writing %s" % (fp.name)) + fp.write(content) + + def _format_code(self, *args): + """Format code using Black + + :param *args: Path to the code to format + """ + for path in args: + subprocess.run(["black", "-l", "79", path]) + + @abc.abstractmethod + def generate( + self, res, target_dir, openapi_spec=None, operation_id=None, args=None + ): + pass + + def generate_mod( + self, + target_dir, + mod_path, + mod_list: set[str], + url: str, + resouce_name: str, + service_name: str, + ): + pass diff --git a/codegenerator/cli.py b/codegenerator/cli.py new file mode 100644 index 0000000..aaa5050 --- /dev/null +++ b/codegenerator/cli.py @@ -0,0 +1,344 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +import argparse +import importlib +import importlib.util +import inspect +import logging +from pathlib import Path +import re +import sys + +from openstack import resource +from sphinx import pycode +import yaml + +from codegenerator.ansible import AnsibleGenerator +from codegenerator import common +from codegenerator.jsonschema import JsonSchemaGenerator +from codegenerator.metadata import MetadataGenerator +from codegenerator.openapi_spec import OpenApiSchemaGenerator +from codegenerator.osc import OSCGenerator +from codegenerator.rust_cli import RustCliGenerator +from codegenerator.rust_sdk import RustSdkGenerator +from codegenerator.types import Metadata + + +class ResourceProcessor: + def __init__(self, mod_name, class_name): + self.mod_name = mod_name + self.class_name = class_name + self.class_plural_name = ( + class_name + "s" if class_name[:-1] != "y" else "ies" + ) + + spec = importlib.util.find_spec(self.mod_name) + if not spec: + raise RuntimeError("Module %s not found" % self.mod_name) + self.module = importlib.util.module_from_spec(spec) + if not self.module: + raise RuntimeError("Error loading module %s" % self.mod_name) + sys.modules[self.mod_name] = self.module + if not spec.loader: + raise RuntimeError("No module loader available") + spec.loader.exec_module(self.module) + self.resource_class = getattr(self.module, self.class_name) + + # Get resource proxy + srv_ver_mod, _, _ = self.mod_name.rpartition(".") + proxy_mod_name = srv_ver_mod + "._proxy" + proxy_spec = importlib.util.find_spec(proxy_mod_name) + if not proxy_spec: + raise 
RuntimeError("Module %s not found" % proxy_mod_name) + self.proxy_mod = importlib.util.module_from_spec(proxy_spec) + if not self.proxy_mod: + raise RuntimeError("Error loading module %s" % proxy_mod_name) + sys.modules[proxy_mod_name] = self.proxy_mod + if not proxy_spec.loader: + raise RuntimeError("No module loader available") + proxy_spec.loader.exec_module(self.proxy_mod) + self.proxy_obj = getattr(self.proxy_mod, "Proxy") + self.srv_ver_mod = srv_ver_mod + + self.service_name = self.mod_name.split(".")[1] + self.fqcn = f"{self.mod_name}.{self.class_name}" + + # Find the resource registry name + for k, v in self.proxy_obj._resource_registry.items(): + if ( + hasattr(v, "__module__") + and v.__module__ == self.mod_name + and v.__name__ == self.class_name + ): + self.registry_name = f"{self.service_name}.{k}" + + self.attrs = dict() + self.process() + + def process(self): + attr_docs = self.get_attr_docs() + for k, v in self.body_attrs(): + doc = attr_docs.get(k) + if doc: + doc = re.sub("\\*Type: .*\\*", "", doc) + doc = doc.rstrip() + if not doc and k == "name": + doc = "Name" + elif not doc and k == "tags": + doc = f"{self.class_name} Tags." 
+ self.attrs[k] = dict(attr=v, docs=doc) + + def get_attr_docs(self): + mod = pycode.ModuleAnalyzer.for_module(self.mod_name) + mod.analyze() + result = {} + for k, v in mod.attr_docs.items(): + if k[0] == self.class_name: + result[k[1]] = " ".join(v) + if "id" not in result: + result["id"] = "Id of the resource" + return result + + def body_attrs(self): + for attr in inspect.getmembers(self.resource_class): + if isinstance(attr[1], resource.Body): + yield attr + + +class Generator: + schemas: dict = {} + metadata: Metadata + + def get_openapi_spec(self, path: Path): + logging.debug("Fetch %s", path) + if path.as_posix() not in self.schemas: + self.schemas[path.as_posix()] = common.get_openapi_spec( + path.as_posix() + ) + return self.schemas[path.as_posix()] + + def load_metadata(self, path: Path): + with open(path, "r") as fp: + data = yaml.safe_load(fp) + self.metadata = Metadata(**data) + + +def main(): + parser = argparse.ArgumentParser( + description="Generate code from OpenStackSDK resource definitions" + ) + parser.add_argument( + "--module", + # required=True, + help="OpenStackSDK Module name (i.e. 
openstack.identity.v3.project)", + ) + parser.add_argument( + "--class-name", + # required=True, + help="OpenStackSDK Class name (under the specified module)", + ) + parser.add_argument( + "--target", + required=True, + choices=[ + "osc", + "ansible", + "rust-sdk", + "rust-cli", + "openapi-spec", + "jsonschema", + "metadata", + ], + help="Target for which to generate code", + ) + parser.add_argument( + "--work-dir", help="Working directory for the generated code" + ) + parser.add_argument( + "--alternative-module-path", + help=("Optional new module path"), + ) + parser.add_argument( + "--alternative-module-name", + help=("Optional new module name " "(rename get into list)"), + ) + parser.add_argument( + "--openapi-yaml-spec", + help=("Path to the OpenAPI spec file (yaml)"), + ) + parser.add_argument( + "--openapi-operation-id", + help=("OpenAPI operationID"), + ) + parser.add_argument( + "--service-type", + help=("Catalog service type"), + ) + + parser.add_argument( + "--api-version", + help=("Api version (used in path for resulting code, i.e. 
v1)"), + ) + + parser.add_argument( + "--metadata", + help=("Metadata file to load"), + ) + parser.add_argument( + "--service", + help=("Metadata service name filter"), + ) + parser.add_argument( + "--resource", + help=("Metadata resource name filter"), + ) + parser.add_argument( + "--validate", + action="store_true", + help=("Metadata resource name filter"), + ) + + generators = { + "osc": OSCGenerator(), + "ansible": AnsibleGenerator(), + "rust-sdk": RustSdkGenerator(), + "rust-cli": RustCliGenerator(), + "openapi-spec": OpenApiSchemaGenerator(), + "jsonschema": JsonSchemaGenerator(), + "metadata": MetadataGenerator(), + } + + for g, v in generators.items(): + v.get_parser(parser) + + args = parser.parse_args() + logging.basicConfig(level=logging.DEBUG) + generator = Generator() + + if args.metadata: + metadata_path = Path(args.metadata) + generator.load_metadata(metadata_path) + # Resulting mod_paths + res_mods = [] + + for res, res_data in generator.metadata.resources.items(): + if args.service and not res.startswith(args.service): + continue + if args.resource and res != f"{args.service}.{args.resource}": + continue + for op, op_data in res_data.operations.items(): + logging.debug(f"Processing operation {op_data.operation_id}") + if args.target in op_data.targets: + op_args = op_data.targets[args.target] + if not op_args.service_type: + op_args.service_type = res.split(".")[0] + if not op_args.api_version: + op_args.api_version = res_data.api_version + if not op_args.operation_type and op_data.operation_type: + op_args.operation_type = op_data.operation_type + # if not op_data.alternative_module_name and args.target == "rust-sdk": + + openapi_spec = generator.get_openapi_spec( + Path( + # metadata_path.parent, + op_data.spec_file + or res_data.spec_file, + ).resolve() + ) + + for mod_path, mod_name, path in generators[ + args.target + ].generate( + res, + args.work_dir, + openapi_spec=openapi_spec, + operation_id=op_data.operation_id, + args=op_args, + ): + 
res_mods.append((mod_path, mod_name, path)) + rust_sdk_extensions = res_data.extensions.get("rust-sdk") + if rust_sdk_extensions: + additional_modules = rust_sdk_extensions.setdefault( + "additional_modules", [] + ) + res_x = res.split(".") + for mod in additional_modules: + res_mods.append( + ( + [ + res_x[0].replace("-", "_"), + res_data.api_version, + res_x[1], + ], + mod, + "", + ) + ) + + if args.target == "rust-sdk" and not args.resource: + resource_results: dict[str, dict] = dict() + for mod_path, mod_name, path in res_mods: + mn = "/".join(mod_path) + x = resource_results.setdefault( + mn, {"path": path, "mods": set()} + ) + x["mods"].add(mod_name) + changed = True + while changed: + changed = False + for mod_path in [ + mod_path_str.split("/") + for mod_path_str in resource_results.keys() + ]: + if len(mod_path) < 3: + continue + mn = "/".join(mod_path[0:-1]) + mod_name = mod_path[-1] + if mn in resource_results: + if mod_name not in resource_results[mn]["mods"]: + resource_results[mn]["mods"].add(mod_name) + changed = True + else: + changed = True + x = resource_results.setdefault( + mn, {"path": path, "mods": set()} + ) + x["mods"].add(mod_name) + + for path, gen_data in resource_results.items(): + generators["rust-sdk"].generate_mod( + args.work_dir, + path.split("/"), + gen_data["mods"], + gen_data["path"], + res.split(".")[-1].capitalize(), + service_name=path.split("/")[0], + ) + exit(0) + + rp = None + if args.module and args.class_name: + rp = ResourceProcessor(args.module, args.class_name) + + generators[args.target].generate( + rp, + args.work_dir, + openapi_spec=None, + operation_id=args.openapi_operation_id, + args=args, + ) + + +if __name__ == "__main__": + main() diff --git a/codegenerator/common/__init__.py b/codegenerator/common/__init__.py new file mode 100644 index 0000000..a798cd7 --- /dev/null +++ b/codegenerator/common/__init__.py @@ -0,0 +1,443 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use 
this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import logging +from pathlib import Path +from typing import Any +import re + +import jsonref +import yaml +from openapi_core import Spec +from pydantic import BaseModel + +VERSION_RE = re.compile(r"[Vv][0-9.]*") +# RE to split name from camelCase or by [`:`,`_`,`-`] +SPLIT_NAME_RE = re.compile(r"(?<=[a-z])(?=[A-Z])|:|_|-") + +# FullyQualifiedAttributeName alias map +FQAN_ALIAS_MAP = {"network.floatingip.floating_ip_address": "name"} + + +def _deep_merge( + dict1: dict[Any, Any], dict2: dict[Any, Any] +) -> dict[Any, Any]: + result = dict1.copy() + for key, value in dict2.items(): + if key in result: + if isinstance(result[key], dict) and isinstance(value, dict): + result[key] = _deep_merge(result[key], value) + continue + elif isinstance(result[key], list) and isinstance(value, list): + result[key] = result[key] + value + continue + result[key] = value + return result + + +class BasePrimitiveType(BaseModel): + lifetimes: set[str] | None = None + builder_macros: set[str] = set([]) + + +class BaseCombinedType(BaseModel): + """A Container Type (Array, Option)""" + + pass + + +class BaseCompoundType(BaseModel): + """A Complex Type (Enum/Struct)""" + + name: str + base_type: str + description: str | None = None + + +def get_openapi_spec(path: str | Path): + """Load OpenAPI spec from a file""" + with open(path, "r") as fp: + spec_data = jsonref.replace_refs(yaml.safe_load(fp), proxies=False) + return Spec.from_dict(spec_data) + + +def find_openapi_operation(spec, operationId: str): + """Find operation by 
operationId in the loaded spec""" + for path, path_spec in spec["paths"].items(): + for method, method_spec in path_spec.items(): + if not isinstance(method_spec, dict): + continue + if method_spec.get("operationId") == operationId: + return (path, method, method_spec) + raise RuntimeError("Cannot find operation %s specification" % operationId) + + +def get_plural_form(resource: str) -> str: + """Get plural for of the resource + + Apply rules from https://www.fluentu.com/blog/english/plural-nouns/ to + build a plural form of the word + """ + if resource[-1] == "y": + return resource[0:-1] + "ies" + elif resource[-1] == "o": + return resource + "es" + elif resource[-2:] == "is": + return resource[0:-2] + "es" + elif resource[-1] in ["s", "x", "z"] or resource[-2:] in ["sh", "ch"]: + return resource + "es" + elif resource[-1] == "f": + return resource[:-1] + "ves" + elif resource[-2:] == "fe": + return resource[:-2] + "ves" + else: + return resource + "s" + + +def get_singular_form(resource: str) -> str: + """Get singular for of the resource + + Apply reverse rules from + https://www.fluentu.com/blog/english/plural-nouns/ to build a singular + plural form of the word keeping certain hacks + """ + if resource[-3:] == "ves": + # impossible to reverse elf -> elves and knife -> knives + return resource[0:-3] + "fe" + elif resource[-3:] == "ies": + return resource[0:-3] + "y" + elif resource[-4:] == "sses": + return resource[0:-2] + elif resource[-2:] == "es": + if resource[-4:-2] in ["sh", "ch"] or resource[-3] in ["s", "x", "z"]: + return resource[0:-2] + else: + # it is impossible to reverse axis => axes + return resource[0:-2] + else: + return resource[:-1] + + +def find_resource_schema( + schema: dict, parent: str | None = None, resource_name: str | None = None +) -> tuple[dict | None, str | None]: + """Find the actual resource schema in the body schema + + Traverse through the body schema searching for an element that represent + the resource itself. 
+ + a) root is an object and it contain property with the resource name + b) root is an object and it contain array property with name equals to + the plural form of the resource name + + :returns: tuple of (schema, attribute name) for the match or (None, None) + if not found + + """ + try: + if "type" not in schema: + # Response of server create is a server or reservation_id + # if "oneOf" in schema: + # kinds = {} + # for kind in schema["oneOf"]: + # kinds.update(kind) + # schema["type"] = kinds["type"] + if "allOf" in schema: + # {'allOf': [ + # {'type': 'integer', 'minimum': 0}, + # {'default': 0}] + # } + kinds = {} + for kind in schema["allOf"]: + kinds.update(kind) + schema["type"] = kinds["type"] + elif schema == {}: + return (None, None) + elif "properties" in schema: + schema["type"] = "object" + else: + raise RuntimeError("No type in %s" % schema) + schema_type = schema["type"] + if schema_type == "array": + if ( + parent + and resource_name + and parent == get_plural_form(resource_name) + ): + items = schema["items"] + if ( + items.get("type") == "object" + and resource_name in items.get("properties", []) + and len(items.get("properties", []).keys()) == 1 + ): + # Most likely this is Keypair where we have keypairs.keypair.{} + return (items["properties"][resource_name], parent) + else: + return (items, parent) + elif ( + not parent and schema.get("items", {}).get("type") == "object" + ): + # Array on the top level. 
Most likely we are searching for items + # directly + return (schema["items"], None) + return find_resource_schema( + schema.get("items", {"type": "string"}), + parent, + resource_name=resource_name, + ) + elif schema_type == "object": + props = ( + schema.properties + if hasattr(schema, "properties") + else schema.get("properties", {}) + ) + if not parent and resource_name in props: + # we are at the top level and there is property with the + # resource name - it is what we are searching for + el_type = props[resource_name]["type"] + if el_type == "array": + return (props[resource_name]["items"], resource_name) + return (props[resource_name], resource_name) + for name, item in props.items(): + if name == "additionalProperties" and isinstance(item, bool): + # Some schemas are broken + continue + (r, path) = find_resource_schema(item, name, resource_name) + if r: + return (r, path) + if not parent: + # We are on top level and have not found anything. + keys = list(props.keys()) + if len(keys) == 1: + # there is only one field in the object + if props[keys[0]].get("type") == "object": + # and it is itself an object + return (props[keys[0]], keys[0]) + else: + # only field is not an object + return (schema, None) + else: + return (schema, None) + except Exception as ex: + logging.exception( + f"Caught exception {ex} during processing of {schema}" + ) + raise + return (None, None) + + +def find_response_schema( + responses: dict, response_key: str, action_name: str | None = None +): + """Locate response schema + + Some operations are having variety of possible responses (depending on + microversion, action, etc). Try to locate suitable response for the client. 
+ + The function iterates over all defined responses and for 2** appies the + following logic: + + - if action_name is present AND oneOf is present AND action_name is in one + of the oneOf schemas -> return this schema + + - if action_name is not present AND oneOf is present AND response_key is in + one of the OneOf candidates' properties (this is an object) -> return it + + - action_name is not present AND oneOf is not present and (response_key or + plural of the response_key) in candidate -> return it + + :param dict responses: Dictionary with responses as defined in OpenAPI spec + :param str response_key: Response key to be searching in responses (when + aciton_name is not given) :param str action_name: Action name to be + searching response for + """ + for code, rspec in responses.items(): + if not code.startswith("2"): + continue + content = rspec.get("content", {}) + if "application/json" in content: + response_spec = content["application/json"] + schema = response_spec["schema"] + oneof = schema.get("oneOf") + discriminator = schema.get("x-openstack", {}).get("discriminator") + if oneof: + if not discriminator: + # Server create returns server or reservation info. 
For the + # cli it is not very helpful and we look for response + # candidate with the resource_name in the response + for candidate in oneof: + if ( + action_name + and candidate.get("x-openstack", {}).get( + "action-name" + ) + == action_name + ): + if response_key in candidate.get("properties", {}): + # If there is a object with resource_name in + # the props - this must be what we want to look + # at + return candidate["properties"][response_key] + else: + return candidate + elif ( + not action_name + and response_key + and candidate.get("type") == "object" + and response_key in candidate.get("properties", {}) + ): + # Actually for the sake of the CLI it may make + # sense to merge all candidates + return candidate["properties"][response_key] + else: + raise NotImplementedError + elif ( + not action_name + and schema + and ( + response_key in schema + or ( + schema.get("type") == "object" + and ( + response_key in schema.get("properties", []) + or get_plural_form(response_key) + in schema.get("properties", []) + ) + ) + ) + ): + return schema + if not action_name: + # Could not find anything with the given response_key. 
If there is any + # 200/204 response - return it + for code in ["200", "204"]: + if code in responses: + schema = ( + responses[code] + .get("content", {}) + .get("application/json", {}) + .get("schema") + ) + if schema and "type" in schema: + return schema + return None + + +def get_resource_names_from_url(path: str): + """Construct Resource name from the URL""" + path_elements = list(filter(None, path.split("/"))) + if path_elements and VERSION_RE.match(path_elements[0]): + path_elements.pop(0) + path_resource_names = [] + + for path_element in path_elements: + if "{" not in path_element: + el = path_element.replace("-", "_") + if el[-3:] == "ies": + part = el[0:-3] + "y" + elif el[-4:] == "sses": + part = el[0:-2] + elif ( + el[-1] == "s" + and el[-3:] != "dns" + and el[-6:] != "access" + and el != "qos" + # quota/details + and el != "details" + ): + part = el[0:-1] + else: + part = el + if part.startswith("os_"): + # We should remove `os_` prefix from resource name + part = part[3:] + path_resource_names.append(part) + if len(path_resource_names) > 1 and ( + path_resource_names[-1] + in [ + "action", + "detail", + ] + or "add" in path_resource_names[-1] + or "remove" in path_resource_names[-1] + or "update" in path_resource_names[-1] + ): + path_resource_names.pop() + if len(path_resource_names) == 0: + return ["version"] + if path.startswith("/v2/schemas/"): + # Image schemas should not be singularized (schema/images, + # schema/image) + path_resource_names[-1] = path_elements[-1] + if path.startswith("/v2/images") and path.endswith("/actions/deactivate"): + path_resource_names = ["image"] + if path.startswith("/v2/images") and path.endswith("/actions/reactivate"): + path_resource_names = ["image"] + if path_resource_names == ["volume_transfer", "accept"]: + path_resource_names = ["volume_transfer"] + + return path_resource_names + + +def get_rust_sdk_mod_path(service_type: str, api_version: str, path: str): + """Construct mod path for rust sdk""" + mod_path = 
[ + service_type.replace("-", "_"), + api_version, + ] + mod_path.extend([x.lower() for x in get_resource_names_from_url(path)]) + return mod_path + + +def get_rust_cli_mod_path(service_type: str, api_version: str, path: str): + """Construct mod path for rust sdk""" + mod_path = [ + service_type.replace("-", "_"), + api_version, + ] + mod_path.extend([x.lower() for x in get_resource_names_from_url(path)]) + return mod_path + + +def get_rust_service_type_from_str(xtype: str): + match xtype: + case "block-storage": + return "BlockStorage" + case "block_storage": + return "BlockStorage" + case "compute": + return "Compute" + case "identity": + return "Identity" + case "image": + return "Image" + case "network": + return "Network" + case "object-store": + return "ObjectStore" + case _: + return xtype + + +def make_ascii_string(description: str | None) -> str | None: + """Make sure a string is a valid ASCII charset + + Placing a text with Unicode chars into the generated code may cause a lot + of code sanity checks violations. Replace all known Unicode chars with + something reasonable and return a pure ASCII string + """ + if not description: + return None + # PlusMinus - https://unicodeplus.com/U+00B1 + description = description.replace("\u00b1", "+-") + + return description diff --git a/codegenerator/common/rust.py b/codegenerator/common/rust.py new file mode 100644 index 0000000..09bd3db --- /dev/null +++ b/codegenerator/common/rust.py @@ -0,0 +1,1135 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +# +import logging +import re +from typing import Type, Any, Generator, Tuple + +from pydantic import BaseModel + +from codegenerator.common import BasePrimitiveType +from codegenerator.common import BaseCombinedType +from codegenerator.common import BaseCompoundType +from codegenerator import model +from codegenerator import common + + +class Boolean(BasePrimitiveType): + """Basic Boolean""" + + type_hint: str = "bool" + imports: set[str] = set([]) + clap_macros: set[str] = set(["action=clap::ArgAction::Set"]) + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + def get_sample(self): + return "false" + + +class Number(BasePrimitiveType): + format: str | None = None + imports: set[str] = set([]) + clap_macros: set[str] = set() + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + @property + def type_hint(self): + if self.format == "float": + return "f32" + elif self.format == "double": + return "f64" + else: + return "f32" + + def get_sample(self): + return "123" + + +class Integer(BasePrimitiveType): + format: str | None = None + imports: set[str] = set([]) + clap_macros: set[str] = set() + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + @property + def type_hint(self): + if self.format == "int32": + return "i32" + elif self.format == "int64": + return "i64" + return "i32" + + def get_sample(self): + return "123" + + +class Null(BasePrimitiveType): + type_hint: str = "Value" + imports: set[str] = set(["serde_json::Value"]) + builder_macros: set[str] = set([]) + clap_macros: set[str] = set() + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + def get_sample(self): + return "Value::Null" + + +class String(BasePrimitiveType): + format: str | None = None + type_hint: str = "String" + builder_macros: set[str] = set(["setter(into)"]) + + # NOTE(gtema): it is not possible 
to override field with computed + # property, thus it must be a property here + @property + def imports(self) -> set[str]: + return set([]) + + def get_sample(self): + return '"foo"' + + +class JsonValue(BasePrimitiveType): + type_hint: str = "Value" + imports: set[str] = set(["serde_json::Value"]) + builder_macros: set[str] = set(["setter(into)"]) + + def get_sample(self): + return "json!({})" + + +class Option(BaseCombinedType): + base_type: str = "Option" + item_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + @property + def type_hint(self): + return f"Option<{self.item_type.type_hint}>" + + @property + def lifetimes(self): + return self.item_type.lifetimes + + @property + def imports(self): + return self.item_type.imports + + @property + def builder_macros(self): + macros = set(["setter(into)"]) + wrapped_macros = self.item_type.builder_macros + if "private" in wrapped_macros: + macros = wrapped_macros + return macros + + @property + def clap_macros(self): + return self.item_type.clap_macros + + def get_sample(self): + return self.item_type.get_sample() + + +class Array(BaseCombinedType): + base_type: str = "vec" + item_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + + @property + def type_hint(self): + return f"Vec<{self.item_type.type_hint}>" + + @property + def lifetimes(self): + return self.item_type.lifetimes + + @property + def imports(self): + return self.item_type.imports + + @property + def builder_macros(self): + macros = set(["setter(into)"]) + return macros + + def get_sample(self): + return ( + "Vec::from([" + + self.item_type.get_sample() + + (".into()" if isinstance(self.item_type, String) else "") + + "])" + ) + + @property + def clap_macros(self) -> set[str]: + return self.item_type.clap_macros + + +class CommaSeparatedList(BaseCombinedType): + item_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + + @property + def 
type_hint(self): + return f"CommaSeparatedList<{self.item_type.type_hint}>" + + @property + def lifetimes(self): + return self.item_type.lifetimes + + @property + def imports(self): + imports: set[str] = set([]) + imports.update(self.item_type.imports) + return imports + + @property + def clap_macros(self) -> set[str]: + return set() + + +class BTreeSet(BaseCombinedType): + item_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + builder_macros: set[str] = set(["setter(into)"]) + + @property + def type_hint(self): + return f"BTreeSet<{self.item_type.type_hint}>" + + @property + def lifetimes(self): + return self.item_type.lifetimes + + @property + def imports(self): + imports = self.item_type.imports + imports.add("std::collections::BTreeSet") + return imports + + +class Dictionary(BaseCombinedType): + base_type: str = "dict" + value_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + + +class StructField(BaseModel): + local_name: str + remote_name: str + description: str | None = None + data_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + is_optional: bool = True + is_nullable: bool = False + + @property + def type_hint(self): + typ_hint = self.data_type.type_hint + if self.is_optional: + typ_hint = f"Option<{typ_hint}>" + return typ_hint + + +class Struct(BaseCompoundType): + base_type: str = "struct" + fields: dict[str, StructField] = {} + field_type_class_: Type[StructField] | StructField = StructField + additional_fields_type: ( + BasePrimitiveType | BaseCombinedType | BaseCompoundType | None + ) = None + + @property + def type_hint(self): + return self.name + ( + f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "" + ) + + @property + def imports(self): + imports: set[str] = set([]) + field_types = [x.data_type for x in self.fields.values()] + if len(field_types) > 1 or ( + len(field_types) == 1 + and not isinstance(field_types[0], Null) + and not isinstance(field_types[0], Dictionary) + and not 
isinstance(field_types[0], Array) + ): + # We use structure only if it is not consisting from only Null + imports.add("serde::Deserialize") + imports.add("serde::Serialize") + for field_type in field_types: + imports.update(field_type.imports) + if self.additional_fields_type: + imports.add("std::collections::BTreeMap") + imports.update(self.additional_fields_type.imports) + return imports + + @property + def lifetimes(self): + lifetimes_: set[str] = set() + for field in self.fields.values(): + if field.data_type.lifetimes: + lifetimes_.update(field.data_type.lifetimes) + return lifetimes_ + + @property + def clap_macros(self) -> set[str]: + return set() + + +class EnumKind(BaseModel): + name: str + description: str | None = None + data_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType + + @property + def type_hint(self): + if isinstance(self.data_type, Struct): + return self.data_type.name + self.data_type.static_lifetime + return self.data_type.type_hint + + @property + def clap_macros(self) -> set[str]: + return set() + + +class Enum(BaseCompoundType): + base_type: str = "enum" + kinds: dict[str, EnumKind] + literals: list[Any] | None = None + original_data_type: BaseCompoundType | BaseCompoundType | None = None + _kind_type_class = EnumKind + + @property + def type_hint(self): + return self.name + ( + f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "" + ) + + @property + def imports(self): + imports: set[str] = set() + imports.add("serde::Deserialize") + imports.add("serde::Serialize") + for kind in self.kinds.values(): + imports.update(kind.data_type.imports) + return imports + + @property + def lifetimes(self): + lifetimes_: set[str] = set() + for kind in self.kinds.values(): + if kind.data_type.lifetimes: + lifetimes_.update(kind.data_type.lifetimes) + return lifetimes_ + + @property + def clap_macros(self) -> set[str]: + return set() + + +class StringEnum(BaseCompoundType): + base_type: str = "enum" + variants: dict[str, set[str]] = {} + 
imports: set[str] = set(["serde::Deserialize", "serde::Serialize"]) + lifetimes: set[str] = set() + derive_container_macros: str = ( + "#[derive(Debug, Deserialize, Clone, Serialize)]" + ) + builder_container_macros: str | None = None + serde_container_macros: str | None = None # "#[serde(untagged)]" + serde_macros: set[str] | None = None + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + @property + def type_hint(self): + """Get type hint""" + return self.name + + @property + def clap_macros(self) -> set[str]: + """Return clap macros""" + return set() + + @property + def builder_macros(self) -> set[str]: + """Return builder macros""" + return set() + + def get_sample(self): + """Generate sample data""" + variant = list(sorted(self.variants.keys()))[0] + return f"{self.name}::{variant}" + + def variant_serde_macros(self, variant: str): + """Return serde macros""" + macros = set([]) + vals = self.variants[variant] + if len(vals) > 1: + macros.add(f'rename(serialize = "{sorted(vals)[0]}")') + for val in vals: + macros.add(f'alias="{val}"') + else: + macros.add(f'rename = "{list(vals)[0]}"') + return "#[serde(" + ", ".join(sorted(macros)) + ")]" + + +class RequestParameter(BaseModel): + """OpenAPI request parameter in the Rust SDK form""" + + remote_name: str + local_name: str + location: str + data_type: BaseCombinedType | BasePrimitiveType | BaseCompoundType + description: str | None = None + is_required: bool = False + is_flag: bool = False + setter_name: str | None = None + setter_type: str | None = None + + @property + def type_hint(self): + if not self.is_required and not isinstance(self.data_type, BTreeSet): + return f"Option<{self.data_type.type_hint}>" + return self.data_type.type_hint + + @property + def lifetimes(self): + return self.data_type.lifetimes + + +class TypeManager: + """Rust type manager + + The class is responsible for converting ADT models into types suitable + for Rust. 
+ """ + + models: list = [] + refs: dict[ + model.Reference, + BasePrimitiveType | BaseCombinedType | BaseCompoundType, + ] = {} + parameters: dict[str, Type[RequestParameter] | RequestParameter] = {} + + #: Base mapping of the primitive data-types + base_primitive_type_mapping: dict[ + Type[model.PrimitiveType], + Type[BasePrimitiveType] | Type[BaseCombinedType], + ] = { + model.PrimitiveString: String, + model.ConstraintString: String, + model.PrimitiveNumber: Number, + model.ConstraintNumber: Number, + model.ConstraintInteger: Integer, + model.PrimitiveBoolean: Boolean, + model.PrimitiveNull: Null, + model.PrimitiveAny: JsonValue, + } + + #: Extension for primitives data-type mapping + primitive_type_mapping: dict[ + Type[model.PrimitiveType], + Type[BasePrimitiveType] | Type[BaseCombinedType], + ] + + #: Extensions of the data-type mapping + data_type_mapping: dict[ + Type[model.ADT], Type[BaseCombinedType] | Type[BaseCompoundType] + ] + #: Base data-type mapping + base_data_type_mapping: dict[ + Type[model.ADT], Type[BaseCombinedType] | Type[BaseCompoundType] + ] = { + model.Dictionary: Dictionary, + model.Enum: Enum, + model.Struct: Struct, + model.Array: Array, + model.CommaSeparatedList: CommaSeparatedList, + model.Set: BTreeSet, + } + #: RequestParameter Type class + request_parameter_class: Type[RequestParameter] = RequestParameter + + #: Option Type class + option_type_class: Type[Option] | Option = Option + #: StringEnum Type class + string_enum_class: Type[StringEnum] | StringEnum = StringEnum + + #: List of the models to be ignored + ignored_models: list[model.Reference] = [] + + def __init__(self): + self.models = [] + self.refs = {} + self.parameters = {} + + # Set base mapping entries into the data_type_mapping + for k, v in self.base_primitive_type_mapping.items(): + if k not in self.primitive_type_mapping: + self.primitive_type_mapping[k] = v + + for k, v in self.base_data_type_mapping.items(): + if k not in self.data_type_mapping: + 
self.data_type_mapping[k] = v + + def get_local_attribute_name(self, name: str) -> str: + """Get localized attribute name""" + name = name.replace(".", "_") + attr_name = "_".join( + x.lower() for x in re.split(common.SPLIT_NAME_RE, name) + ) + if attr_name in ["type", "self", "enum", "ref"]: + attr_name = f"_{attr_name}" + return attr_name + + def get_remote_attribute_name(self, name: str) -> str: + """Get remote attribute name + + This method can be used on the client side to be able to override + remote attribute name as a local name on the SDK side. + """ + return name + + def get_model_name(self, model_ref: model.Reference | None) -> str: + """Get the localized model type name""" + if not model_ref: + return "Request" + name = "".join( + x.capitalize() + for x in re.split(common.SPLIT_NAME_RE, model_ref.name) + ) + return name + + def _get_adt_by_reference(self, model_ref): + for model_ in self.models: + if model_.reference == model_ref: + return model_ + raise RuntimeError("Cannot find reference %s" % model_ref) + + def convert_model( + self, + type_model: model.PrimitiveType | model.ADT | model.Reference, + ) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType: + """Get local destination type from the ModelType""" + # logging.debug("Get RustSDK type for %s", type_model) + typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = ( + None + ) + model_ref: model.Reference | None = None + if isinstance(type_model, model.Reference): + model_ref = type_model + type_model = self._get_adt_by_reference(type_model) + elif isinstance(type_model, model.ADT): + # Direct composite type + model_ref = type_model.reference + else: + # Primitive + xtyp = self.primitive_type_mapping.get(type_model.__class__) + if not xtyp: + raise RuntimeError("No mapping for %s" % type_model) + return xtyp(**type_model.model_dump()) + + # Composite/Compound type + if model_ref and model_ref in self.refs: + return self.refs[model_ref] + if isinstance(type_model, 
model.Array): + typ = self._get_array_type(type_model) + elif isinstance(type_model, model.Struct): + typ = self._get_struct_type(type_model) + elif isinstance(type_model, model.OneOfType): + typ = self._get_one_of_type(type_model) + elif isinstance(type_model, model.Dictionary): + typ = self.data_type_mapping[model.Dictionary]( + value_type=self.convert_model(type_model.value_type) + ) + elif isinstance(type_model, model.CommaSeparatedList): + typ = self.data_type_mapping[model.CommaSeparatedList]( + item_type=self.convert_model(type_model.item_type) + ) + elif isinstance(type_model, model.Set): + typ = self.data_type_mapping[model.Set]( + item_type=self.convert_model(type_model.item_type) + ) + elif isinstance(type_model, model.Enum): + if len(type_model.base_types) > 1: + if model.PrimitiveBoolean in type_model.base_types: + # enum literals supporting also bools are most likely + # bool + string -> just keep bool on the Rust side + typ = Boolean() + else: + raise RuntimeError( + f"Rust model does not support multitype enums yet {type_model}" + ) + elif len(type_model.base_types) == 1: + base_type = type_model.base_types[0] + if base_type is model.ConstraintString: + variants: dict[str, set[str]] = {} + try: + if None in type_model.literals: + # TODO(gtema): make parent nullable or add "null" + # as enum value + type_model.literals.remove(None) + for lit in set(x.lower() for x in type_model.literals): + val = "".join( + [ + x.capitalize() + for x in re.split( + common.SPLIT_NAME_RE, lit + ) + ] + ) + if val and val[0].isdigit(): + val = "_" + val + vals = variants.setdefault(val, set()) + for orig_val in type_model.literals: + if orig_val.lower() == lit: + vals.add(orig_val) + + typ = self.string_enum_class( + name=self.get_model_name(type_model.reference), + variants=variants, + ) + except Exception: + logging.exception( + "Error processing enum: %s", type_model + ) + elif base_type is model.ConstraintInteger: + typ = self.primitive_type_mapping[ + 
model.ConstraintInteger + ]() + elif base_type is model.PrimitiveBoolean: + typ = self.primitive_type_mapping[model.PrimitiveBoolean]() + + if not typ: + raise RuntimeError( + "Cannot map model type %s to Rust type [%s]" + % (type_model.__class__.__name__, type_model) + ) + + if not model_ref: + model_ref = model.Reference(name="Body", type=typ.__class__) + self.refs[model_ref] = typ + return typ + + def _get_array_type(self, type_model: model.Array) -> Array: + """Convert `model.Array` into corresponding Rust SDK model""" + return self.data_type_mapping[model.Array]( + name=self.get_model_name(type_model.reference), + item_type=self.convert_model(type_model.item_type), + ) + + def _get_one_of_type( + self, type_model: model.OneOfType + ) -> BaseCompoundType | BaseCombinedType | BasePrimitiveType: + """Convert `model.OneOfType` into Rust model""" + kinds: list[dict] = [] + is_nullable: bool = False + result_data_type = None + for kind in type_model.kinds: + if isinstance(kind, model.PrimitiveNull): + # Remove null from candidates and instead wrap with Option + is_nullable = True + continue + kind_type = self.convert_model(kind) + is_type_already_present = False + for processed_kind_type in kinds: + if ( + isinstance(kind_type, BasePrimitiveType) + and processed_kind_type["local"] == kind_type + ): + logging.debug( + "Simplifying oneOf with same mapped type %s [%s]", + kind, + type_model, + ) + is_type_already_present = True + break + if not is_type_already_present: + kinds.append( + { + "model": kind, + "local": kind_type, + "class": kind_type.__class__, + } + ) + + # Simplify certain oneOf combinations + self._simplify_oneof_combinations(type_model, kinds) + + if len(kinds) == 2: + list_type = [ + x["local"] + for x in kinds + if x["class"] == self.data_type_mapping[model.Array] + ] + if list_type: + lt: BaseCombinedType = list_type[0] + # Typ + list[Typ] => Vec<Typ> + item_type = [ + x["local"] + for x in kinds + if x["class"] != 
self.data_type_mapping[model.Array] + ][0] + if item_type.__class__ == lt.item_type.__class__: + result_data_type = self.data_type_mapping[model.Array]( + item_type=item_type, + description=sanitize_rust_docstrings( + type_model.description + ), + ) + # logging.debug("Replacing Typ + list[Typ] with list[Typ]") + elif len(kinds) == 1: + result_data_type = kinds[0]["local"] + + if not result_data_type: + enum_class = self.data_type_mapping[model.Enum] + result_data_type = enum_class( + name=self.get_model_name(type_model.reference), kinds={} + ) + cnt: int = 0 + for kind_data in kinds: + cnt += 1 + kind_data_type = kind_data["local"] + kind_description: str | None = None + if isinstance(kind_data["model"], model.ADT): + kind_name = self.get_model_name(kind_data["model"]) + kind_description = kind_data["model"].description + else: + kind_name = f"F{cnt}" + enum_kind = enum_class._kind_type_class( + name=kind_name, + description=sanitize_rust_docstrings(kind_description), + data_type=kind_data_type, + ) + result_data_type.kinds[enum_kind.name] = enum_kind + + if is_nullable: + result_data_type = self.option_type_class( + item_type=result_data_type + ) + + return result_data_type + + def _get_struct_type(self, type_model: model.Struct) -> Struct: + """Convert model.Struct into Rust `Struct`""" + struct_class = self.data_type_mapping[model.Struct] + mod = struct_class( + name=self.get_model_name(type_model.reference), + description=sanitize_rust_docstrings(type_model.description), + ) + field_class = mod.field_type_class_ + for field_name, field in type_model.fields.items(): + is_nullable: bool = False + field_data_type = self.convert_model(field.data_type) + if isinstance(field_data_type, self.option_type_class): + # Unwrap Option into "is_nullable" NOTE: but perhaps + # Option<Option> is better (not set vs set explicitly to None + # ) + is_nullable = True + if isinstance(field_data_type.item_type, Array): + # Unwrap Option<Option<Vec...>> + field_data_type = 
field_data_type.item_type + f = field_class( + local_name=self.get_local_attribute_name(field_name), + remote_name=self.get_remote_attribute_name(field_name), + description=sanitize_rust_docstrings(field.description), + data_type=field_data_type, + is_optional=not field.is_required, + is_nullable=is_nullable, + ) + mod.fields[field_name] = f + if type_model.additional_fields: + definition = type_model.additional_fields + # Structure allows additional fields + if isinstance(definition, bool): + mod.additional_fields_type = self.primitive_type_mapping[ + model.PrimitiveAny + ] + else: + mod.additional_fields_type = self.convert_model(definition) + return mod + + def _simplify_oneof_combinations(self, type_model, kinds): + """Simplify certain known oneOf combinations""" + kinds_classes = [x["class"] for x in kinds] + string_klass = self.primitive_type_mapping[model.ConstraintString] + number_klass = self.primitive_type_mapping[model.ConstraintNumber] + integer_klass = self.primitive_type_mapping[model.ConstraintInteger] + boolean_klass = self.primitive_type_mapping[model.PrimitiveBoolean] + dict_klass = self.data_type_mapping[model.Dictionary] + enum_name = type_model.reference.name if type_model.reference else None + if string_klass in kinds_classes and number_klass in kinds_classes: + # oneOf [string, number] => string + for typ in list(kinds): + if typ["class"] == number_klass: + kinds.remove(typ) + elif string_klass in kinds_classes and integer_klass in kinds_classes: + if enum_name and ( + enum_name.endswith("size") or enum_name.endswith("count") + ): + # XX_size or XX_count is clearly an integer + for typ in list(kinds): + if typ["class"] == string_klass: + kinds.remove(typ) + else: + # oneOf [string, integer] => string + # Reason: compute.server.flavorRef is string or integer. 
For + # simplicity keep string + for typ in list(kinds): + if typ["class"] == integer_klass: + kinds.remove(typ) + elif string_klass in kinds_classes and boolean_klass in kinds_classes: + # oneOf [string, boolean] => boolean + for typ in list(kinds): + if typ["class"] == string_klass: + kinds.remove(typ) + elif string_klass in kinds_classes and dict_klass in kinds_classes: + # oneOf [string, dummy object] => JsonValue + # Simple string can be easily represented by JsonValue + for c in kinds: + # Discard dict + self.ignored_models.append(c["model"]) + kinds.clear() + jsonval_klass = self.primitive_type_mapping[model.PrimitiveAny] + kinds.append({"local": jsonval_klass(), "class": jsonval_klass}) + elif len(set(kinds_classes)) == 1 and string_klass in kinds_classes: + # in the output oneOf of same type (but maybe different formats) + # makes no sense + # Example is server addresses which are ipv4 or ipv6 + bck = kinds[0].copy() + kinds.clear() + kinds.append(bck) + + def set_models(self, models): + """Process (translate) ADT models into Rust SDK style""" + self.models = models + self.refs = {} + self.ignored_models = [] + unique_model_names: set[str] = set() + for model_ in models: + model_data_type = self.convert_model(model_) + if not isinstance(model_data_type, BaseCompoundType): + continue + name = getattr(model_data_type, "name", None) + if name and name in unique_model_names: + # There is already a model with this name. 
Try adding suffix from datatype name + new_name = name + model_data_type.__class__.__name__ + if new_name not in unique_model_names: + # New name is still unused + model_data_type.name = new_name + unique_model_names.add(new_name) + elif isinstance(model_data_type, Struct): + # This is already an exceptional case (identity.mapping + # with remote being oneOf with multiple structs) + # Try to make a name consisting of props + props = model_data_type.fields.keys() + new_new_name = name + "".join( + x.title() for x in props + ).replace("_", "") + if new_new_name not in unique_model_names: + for other_ref, other_model in self.refs.items(): + other_name = getattr(other_model, "name", None) + if not other_name: + continue + if other_name in [ + name, + new_name, + ] and isinstance(other_model, Struct): + # rename first occurence to the same scheme + props = other_model.fields.keys() + new_other_name = name + "".join( + x.title() for x in props + ).replace("_", "") + other_model.name = new_other_name + unique_model_names.add(new_other_name) + + model_data_type.name = new_new_name + unique_model_names.add(new_new_name) + else: + raise RuntimeError( + "Model name %s is already present" % new_name + ) + else: + raise RuntimeError( + "Model name %s is already present" % new_name + ) + elif name: + unique_model_names.add(name) + + for ignore_model in self.ignored_models: + self.discard_model(ignore_model) + + def get_subtypes(self): + """Get all subtypes excluding TLA""" + for k, v in self.refs.items(): + if ( + k + and isinstance(v, (Enum, Struct, StringEnum)) + and k.name != "Body" + ): + yield v + elif ( + k + and k.name != "Body" + and isinstance(v, self.option_type_class) + ): + if isinstance(v.item_type, Enum): + yield v.item_type + + def get_root_data_type(self): + """Get TLA type""" + for k, v in self.refs.items(): + if not k or (k.name == "Body" and isinstance(v, Struct)): + if isinstance(v.fields, dict): + # There might be tuple Struct (with + # fields as list) + 
field_names = list(v.fields.keys()) + if ( + len(field_names) == 1 + and v.fields[field_names[0]].is_optional + ): + # A body with only field can not normally be optional + logging.warning( + "Request body with single root field cannot be optional" + ) + v.fields[field_names[0]].is_optional = False + return v + elif not k or (k.name == "Body" and isinstance(v, Dictionary)): + # Response is a free style Dictionary + return v + # No root has been found, make a dummy one + root = self.data_type_mapping[model.Struct](name="Request") + return root + + def get_imports(self): + """Get complete set of additional imports required by all models in scope""" + imports: set[str] = set() + imports.update(self.get_root_data_type().imports) + for subt in self.get_subtypes(): + imports.update(subt.imports) + # for item in self.refs.values(): + # imports.update(item.imports) + for param in self.parameters.values(): + imports.update(param.data_type.imports) + return imports + + def get_request_static_lifetimes(self, request_model: Struct): + """Return static lifetimes of the Structure""" + lifetimes = request_model.lifetimes + for param in self.parameters.values(): + lt = param.lifetimes + if lt: + lifetimes.update(lt) + if lifetimes: + return f"<{', '.join(lifetimes)}>" + return "" + + def subtype_requires_private_builders(self, subtype) -> bool: + """Return `True` if type require private builder""" + if not isinstance(subtype, self.data_type_mapping[model.Struct]): + return False + for field in subtype.fields.values(): + if "private" in field.builder_macros: + return True + if isinstance(subtype, Struct) and subtype.additional_fields_type: + return True + return False + + def set_parameters(self, parameters: list[model.RequestParameter]) -> None: + """Set OpenAPI operation parameters into typemanager for conversion""" + for parameter in parameters: + data_type = self.convert_model(parameter.data_type) + param = self.request_parameter_class( + 
remote_name=self.get_remote_attribute_name(parameter.name), + local_name=self.get_local_attribute_name(parameter.name), + data_type=data_type, + location=parameter.location, + description=sanitize_rust_docstrings(parameter.description), + is_required=parameter.is_required, + is_flag=parameter.is_flag, + ) + self.parameters[param.local_name] = param + + def get_parameters( + self, location: str + ) -> Generator[Tuple[str, Type[RequestParameter]], None, None]: + """Get parameters by location""" + for k, v in self.parameters.items(): + if v.location == location: + yield (k, v) + + def discard_model( + self, + type_model: model.PrimitiveType | model.ADT | model.Reference, + ): + """Discard model from the manager""" + logging.debug(f"Request to discard {type_model}") + if isinstance(type_model, model.Reference): + type_model = self._get_adt_by_reference(type_model) + if not hasattr(type_model, "reference"): + return + for ref, data in list(self.refs.items()): + if ref == type_model.reference: + sub_ref: model.Reference | None = None + if ref.type == model.Struct: + logging.debug( + "Element is a struct. Purging also field types" + ) + # For struct type we cascadely discard all field types as + # well + for v in type_model.fields.values(): + if isinstance(v.data_type, model.Reference): + sub_ref = v.data_type + else: + sub_ref = getattr(v.data_type, "reference", None) + if sub_ref: + logging.debug(f"Need to purge also {sub_ref}") + self.discard_model(sub_ref) + elif ref.type == model.OneOfType: + logging.debug( + "Element is a OneOf. Purging also kinds types" + ) + for v in type_model.kinds: + if isinstance(v, model.Reference): + sub_ref = v + else: + sub_ref = getattr(v, "reference", None) + if sub_ref: + logging.debug(f"Need to purge also {sub_ref}") + self.discard_model(sub_ref) + elif ref.type == model.Array: + logging.debug( + f"Element is a Array. 
Purging also item type {type_model.item_type}"
                    )
                    # Cascade the discard into the array's item type, which
                    # may be referenced directly or via a `reference` attr.
                    if isinstance(type_model.item_type, model.Reference):
                        sub_ref = type_model.item_type
                    else:
                        sub_ref = getattr(
                            type_model.item_type, "reference", None
                        )
                    if sub_ref:
                        logging.debug(f"Need to purge also {sub_ref}")
                        self.discard_model(sub_ref)
                logging.debug(f"Purging {ref} from models")
                # Drop the entry itself; pop(..., None) tolerates a ref that
                # was already removed by a recursive discard above.
                self.refs.pop(ref, None)


def sanitize_rust_docstrings(doc: str | None) -> str | None:
    """Sanitize the string to be a valid rust docstring

    Rustdoc treats fenced code blocks as Rust code and compiles them as
    doctests; opening fences are therefore rewritten from "```" to
    "```text" so the snippet renders verbatim instead of being compiled.

    NOTE(review): only lines *ending* in a bare "```" are recognized. An
    opening fence that already carries a language tag (e.g. "```json")
    would not flip `code_block_open`, so its closing "```" would be
    mistaken for an opener and get "text" appended — TODO confirm the
    upstream descriptions never use tagged fences.

    :param doc: raw docstring text, or None
    :returns: the sanitized text, or None when `doc` is empty/None
    """
    if not doc:
        return None
    code_block_open: bool = False
    lines: list[str] = []
    for line in doc.split("\n"):
        if line.endswith("```"):
            if not code_block_open:
                # Rustdoc defaults to rust code for code blocks. To prevent
                # this explicitly add `text`
                code_block_open = True
                line = line + "text"
            else:
                code_block_open = False
        lines.append(line)
    return "\n".join(lines)


def get_operation_variants(spec: dict, operation_name: str):
    """Build the list of request variants for one OpenAPI operation.

    Every variant is a dict that carries a "mime_type" and/or a "body"
    (the JSON schema of the request body); variants produced for wsgi
    "action" bodies additionally carry "mode": "action" and the
    "min-ver" taken from the body's "x-openstack" extension.

    :param spec: the OpenAPI operation specification (one method entry)
    :param operation_name: for action-discriminated bodies, the action
        name whose body variant(s) should be selected
    :raises RuntimeError: when an action discriminator is present but no
        body matches `operation_name`
    """
    request_body = spec.get("requestBody")
    # List of operation variants (based on the body)
    operation_variants = []

    if request_body:
        content = request_body.get("content", {})
        json_body_schema = content.get("application/json", {}).get("schema")
        if json_body_schema:
            mime_type = "application/json"
            # response_def = json_body_schema
            if "oneOf" in json_body_schema:
                # There is a choice of bodies. It can be because of
                # microversion or an action (or both)
                # For action we should come here with operation_type="action" and operation_name must be the action name
                # For microversions we build body as enum
                # So now try to figure out what the discriminator is
                discriminator = json_body_schema.get("x-openstack", {}).get(
                    "discriminator"
                )
                if discriminator == "microversion":
                    logging.debug("Microversion discriminator for bodies")
                    for variant in json_body_schema["oneOf"]:
                        # NOTE(review): variant_spec is assigned but unused
                        # in this branch
                        variant_spec = variant.get("x-openstack", {})
                        operation_variants.append(
                            {"body": variant, "mime_type": mime_type}
                        )
                    # operation_variants.extend([{"body": x} for x in json_body_schema(["oneOf"])])
                elif discriminator == "action":
                    # We are in the action. Need to find matching body
                    for variant in json_body_schema["oneOf"]:
                        variant_spec = variant.get("x-openstack", {})
                        if variant_spec.get("action-name") == operation_name:
                            # An action body may itself be a oneOf split by
                            # microversion — expand each sub-variant.
                            discriminator = variant_spec.get("discriminator")
                            if (
                                "oneOf" in variant
                                and discriminator == "microversion"
                            ):
                                logging.debug(
                                    "Microversion discriminator for action bodies"
                                )
                                for subvariant in variant["oneOf"]:
                                    subvariant_spec = subvariant.get(
                                        "x-openstack", {}
                                    )
                                    operation_variants.append(
                                        {
                                            "body": subvariant,
                                            "mode": "action",
                                            "min-ver": subvariant_spec.get(
                                                "min-ver"
                                            ),
                                            "mime_type": mime_type,
                                        }
                                    )
                            else:
                                logging.debug(
                                    "Action %s with %s", variant, discriminator
                                )
                                operation_variants.append(
                                    {
                                        "body": variant,
                                        "mode": "action",
                                        "min-ver": variant_spec.get("min-ver"),
                                        "mime_type": mime_type,
                                    }
                                )
                            break
                    if not operation_variants:
                        raise RuntimeError(
                            "Cannot find body specification for action %s"
                            % operation_name
                        )
            else:
                operation_variants.append(
                    {"body": json_body_schema, "mime_type": mime_type}
                )
        elif "application/octet-stream" in content:
            mime_type = "application/octet-stream"
            operation_variants.append({"mime_type": mime_type})
        elif "application/openstack-images-v2.1-json-patch" in content:
            mime_type = "application/openstack-images-v2.1-json-patch"
            operation_variants.append({"mime_type": mime_type})
        elif "application/json-patch+json" in content:
            mime_type = "application/json-patch+json"
            operation_variants.append({"mime_type": mime_type})
        elif content == {}:
            operation_variants.append({"body": None})
    else:
        # Explicitly register variant without body
        operation_variants.append({"body": None})

    return operation_variants
diff --git a/codegenerator/common/schema.py b/codegenerator/common/schema.py
new file mode 100644
index 0000000..d6b1034
--- /dev/null
+++ b/codegenerator/common/schema.py
@@ -0,0 +1,154 @@
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
+# +from typing import Any, Dict, List, Optional + +from pydantic import BaseModel, ConfigDict, Field + +# from openapi_core import Spec + + +class TypeSchema(BaseModel): + # TODO(gtema): enums are re-shuffled on every serialization + model_config = ConfigDict(extra="allow", populate_by_name=True) + + type: Optional[str | List[str]] = None + format: Optional[str] = None + description: Optional[str] = None + summary: str | None = None + default: Optional[Any] = None + items: Optional[Dict[str, Any]] = None + # circular reference cause issues on deserializing + properties: Optional[Dict[str, Any]] = None + nullable: Optional[bool] = None + additionalProperties: Optional[bool | Any] = None + + ref: Optional[str] = Field(alias="$ref", default=None) + oneOf: Optional[List[Any]] = Field(default=None) + anyOf: Optional[List[Any]] = Field(default=None) + openstack: Optional[Dict[str, Any]] = Field( + alias="x-openstack", default=None + ) + required: Optional[List[str]] = None + pattern: Optional[str] = None + maxLength: Optional[int] = None + + @classmethod + def openapi_type_from_sdk(cls, type_name, fallback_type): + if type_name in ["string", "str"]: + return {"type": "string"} + elif type_name == "int": + return {"type": "integer"} + elif type_name == "bool": + return {"type": "boolean"} + elif type_name == "dict": + return {"type": "object"} + elif type_name == "list": + return {"type": "array"} + else: + # This is a fallback. 
Maybe we should define those objects + return {"type": fallback_type} + + @classmethod + def from_sdk_field(cls, field, fallback_type="object"): + property_schema_attrs = {} + if field.type: + field_type = getattr(field.type, "__name__", "string") + else: + field_type = "string" + + property_schema_attrs.update( + cls.openapi_type_from_sdk(field_type, fallback_type) + ) + if field_type == "list": + item_type = getattr(field, "list_type") + item_type_str = getattr(item_type, "__name__", "string") + property_schema_attrs["items"] = cls.openapi_type_from_sdk( + item_type_str, fallback_type + ) + + return cls(**property_schema_attrs) + + +class ParameterSchema(BaseModel): + model_config = ConfigDict(extra="allow", populate_by_name=True) + + location: str = Field(alias="in", default=None) + name: str | None = None + description: str | None = None + type_schema: TypeSchema = Field(alias="schema", default=None) + required: bool = False + deprecated: bool = False + style: str | None = None + explode: bool | None = None + ref: str = Field(alias="$ref", default=None) + openstack: Dict[str, Any] = Field(alias="x-openstack", default=None) + + def get_sdk_name(self): + return self.sdk_name or self.name + + +class OperationSchema(BaseModel): + model_config = ConfigDict(extra="allow", populate_by_name=True) + + parameters: List[ParameterSchema] = [] + description: str | None = None + operationId: str | None = None + requestBody: dict = {} + responses: Dict[str, dict] = {} + tags: List[str] = list() + deprecated: bool | None = None + openstack: dict = Field(alias="x-openstack", default={}) + security: List | None = None + + +class HeaderSchema(BaseModel): + model_config = ConfigDict(extra="allow", populate_by_name=True) + + description: Optional[str] = None + openstack: Optional[Dict[str, Any]] = Field( + alias="x-openstack", default=None + ) + schema: Optional[TypeSchema] = Field(default=None) + + +class PathSchema(BaseModel): + model_config = ConfigDict(extra="allow", 
populate_by_name=True) + + parameters: List[ParameterSchema] = [] + get: OperationSchema = OperationSchema() + post: OperationSchema = OperationSchema() + delete: OperationSchema = OperationSchema() + put: OperationSchema = OperationSchema() + patch: OperationSchema = OperationSchema() + head: OperationSchema = OperationSchema() + + +class ComponentsSchema(BaseModel): + model_config = ConfigDict(extra="allow", populate_by_name=True) + + schemas: Dict[str, TypeSchema] = {} + parameters: Dict[str, ParameterSchema] = {} + headers: Dict[str, HeaderSchema] = {} + + +class SpecSchema(BaseModel): + class Config: + pupulate_by_name = True + extra = "allow" + + openapi: str + info: dict + paths: Dict[str, PathSchema] = {} + components: ComponentsSchema = ComponentsSchema() + tags: List[Dict] = [] + security: List[Dict] = [] diff --git a/codegenerator/jsonschema.py b/codegenerator/jsonschema.py new file mode 100644 index 0000000..9b89340 --- /dev/null +++ b/codegenerator/jsonschema.py @@ -0,0 +1,85 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
#

import json
import logging

from codegenerator.base import BaseGenerator
from codegenerator.common.schema import TypeSchema


class JsonSchemaGenerator(BaseGenerator):
    """Generate jsonschema from the SDK resource"""

    def __init__(self):
        super().__init__()

    def _build_resource_schema(self, res):
        """Build a TypeSchema describing the resource's attributes.

        :param res: object with `attrs` (mapping of sdk-name -> {"attr":
            field, "docs": ...}) and `resource_class`; presumably produced
            from an openstacksdk Resource — TODO confirm against caller.
        :returns: a TypeSchema of type "object"
        """
        # resource = res.resource_class
        properties = {}
        for k, v in res.attrs.items():
            field = v["attr"]
            # Serialize each field schema to a plain dict so extra
            # annotations can be attached below.
            properties[field.name] = TypeSchema.from_sdk_field(
                field
            ).model_dump(
                exclude_none=True, exclude_defaults=True, by_alias=True
            )
            if "docs" in v:
                properties[field.name]["description"] = v["docs"]
            if k != field.name:
                # Remember the SDK-side name when it differs from the wire
                # name.
                properties[field.name]["x-openstack-sdk-name"] = k
            # Heuristic: these well-known server-managed attributes are
            # marked read-only.
            if k in [
                "created_at",
                "updated_at",
                "deleted_at",
                "id",
                "status",
                "trunk_details",
            ]:
                properties[field.name]["readOnly"] = True
            # Heuristic: min*/max*/count attributes are forced to integer
            # regardless of the SDK-declared type.
            if k.startswith("min") or k.startswith("max") or "count" in k:
                properties[field.name]["type"] = "integer"
        if res.resource_class.resource_key:
            # Wrap the properties under the resource key (e.g. {"server":
            # {...}}) when the SDK resource declares one.
            properties = {
                res.resource_class.resource_key: {
                    "type": "object",
                    "properties": properties,
                }
            }
        schema = TypeSchema(
            type="object", properties=properties, description=""
        )
        # if res.resource_class._store_unknown_attrs_as_properties:
        #     schema_attrs["additionalProperties"] = True
        # schema_attrs["properties"] = properties
        return schema

    def generate(
        self, res, target_dir, openapi_spec=None, operation_id=None, args=None
    ):
        """Generate Json Schema definition file for Resource

        Prints the pretty-printed JSON schema to stdout; `target_dir`,
        `openapi_spec`, `operation_id` and `args` are accepted for
        interface parity with other generators but are not used here.
        """
        logging.debug("Generating OpenAPI schema data")
        # We do not import generators since due to the use of Singletons in the
        # code importing glance, nova, cinder at the same time crashes
        # dramatically
        schema = self._build_resource_schema(res)
        print(
            json.dumps(
                json.loads(
                    schema.model_dump_json(
                        exclude_none=True, exclude_defaults=True, by_alias=True
                    )
                ),
                indent=4,
            )
        )
diff --git a/codegenerator/metadata.py b/codegenerator/metadata.py
new file
mode 100644 index 0000000..3c33b1e --- /dev/null +++ b/codegenerator/metadata.py @@ -0,0 +1,740 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +from pathlib import Path +import logging +import re + +import jsonref +from ruamel.yaml import YAML + +from codegenerator.base import BaseGenerator +from codegenerator import common +from codegenerator.common.schema import SpecSchema +from codegenerator.types import Metadata +from codegenerator.types import OperationModel +from codegenerator.types import OperationTargetParams +from codegenerator.types import ResourceModel + + +class MetadataGenerator(BaseGenerator): + """Generate metadata from OpenAPI spec""" + + def load_openapi(self, path): + """Load existing OpenAPI spec from the file""" + if not path.exists(): + return + yaml = YAML(typ="safe") + with open(path, "r") as fp: + spec = jsonref.replace_refs(yaml.load(fp)) + + return SpecSchema(**spec) + + def generate( + self, res, target_dir, openapi_spec=None, operation_id=None, args=None + ): + """Generate Json Schema definition file for Resource""" + logging.debug("Generating OpenAPI schema data") + # We do not import generators since due to the use of Singletons in the + # code importing glance, nova, cinder at the same time crashes + # dramatically + spec_path = Path(args.openapi_yaml_spec) + metadata_path = Path(target_dir, args.service_type + "_metadata.yaml") + + schema = self.load_openapi(spec_path) + openapi_spec = common.get_openapi_spec(spec_path) + metadata = 
Metadata(resources=dict()) + api_ver = "v" + schema.info["version"].split(".")[0] + for path, spec in schema.paths.items(): + path_elements: list[str] = path.split("/") + resource_name = "/".join( + [x for x in common.get_resource_names_from_url(path)] + ) + if args.service_type == "object-store": + if path == "/v1/{account}": + resource_name = "account" + elif path == "/v1/{account}/{container}": + resource_name = "container" + if path == "/v1/{account}/{object}": + resource_name = "object" + if args.service_type == "compute" and resource_name in [ + "agent", + "baremetal_node", + "cell", + "cell/capacity", + "cell/info", + "cell/sync_instance", + "certificate", + "cloudpipe", + "fping", + "fixed_ip", + "floating_ip_dns", + "floating_ip_dns/entry", + "floating_ip_pool", + "floating_ip_bulk", + "host", + "host/reboot", + "host/shutdown", + "host/startup", + "image", + "image/metadata", + "network", + "security_group_default_rule", + "security_group_rule", + "security_group", + "server/console", + "server/virtual_interface", + "snapshot", + "tenant_network", + "volume", + "volumes_boot", + ]: + # We do not need to produce anything for deprecated APIs + continue + resource_model = metadata.resources.setdefault( + f"{args.service_type}.{resource_name}", + ResourceModel( + api_version=api_ver, + spec_file=spec_path.as_posix(), + operations=dict(), + ), + ) + for method in [ + "head", + "get", + "put", + "post", + "delete", + "options", + "patch", + ]: + operation = getattr(spec, method, None) + if operation: + if not operation.operationId: + # Every operation must have operationId + continue + op_model = OperationModel( + operation_id=operation.operationId, targets=dict() + ) + operation_key: str | None = None + + response_schema: dict | None = None + for code, rsp in operation.responses.items(): + if code.startswith("2"): + response_schema = ( + rsp.get("content", {}) + .get("application/json", {}) + .get("schema", {}) + ) + break + if path.endswith("}"): + if method 
== "get": + operation_key = "show" + elif method == "head": + operation_key = "check" + elif method == "put": + operation_key = "update" + elif method == "patch": + if ( + "application/json" + in operation.requestBody.get("content", {}) + ): + operation_key = "update" + else: + operation_key = "patch" + elif method == "post": + operation_key = "create" + elif method == "delete": + operation_key = "delete" + elif path.endswith("/detail"): + if method == "get": + operation_key = "list_detailed" + # elif path.endswith("/default"): + # operation_key = "default" + elif path == "/v2/images/{image_id}/file": + if method == "put": + operation_key = "upload" + elif method == "get": + operation_key = "download" + else: + raise NotImplementedError + elif path == "/v3/users/{user_id}/password": + if method == "post": + operation_key = "update" + elif ( + args.service_type == "compute" + and resource_name == "flavor/flavor_access" + and method == "get" + ): + operation_key = "list" + elif ( + args.service_type == "compute" + and resource_name == "aggregate/image" + and method == "post" + ): + operation_key = "action" + elif ( + args.service_type == "compute" + and resource_name == "server/security_group" + and method == "get" + ): + operation_key = "list" + elif ( + args.service_type == "compute" + and resource_name == "server/topology" + and method == "get" + ): + operation_key = "list" + + elif response_schema and ( + method == "get" + and ( + response_schema.get("type", "") == "array" + or ( + response_schema.get("type", "") == "object" + and "properties" in response_schema + and len(path_elements) > 1 + and path_elements[-1] + in response_schema.get("properties", {}) + ) + ) + ): + # Response looks clearly like a list + operation_key = "list" + elif path.endswith("/action"): + # Action + operation_key = "action" + elif args.service_type == "image" and path.endswith( + "/actions/deactivate" + ): + operation_key = "deactivate" + elif args.service_type == "image" and 
path.endswith( + "/actions/reactivate" + ): + operation_key = "reactivate" + elif ( + args.service_type == "block-storage" + and "volume-transfers" in path + and path.endswith("}/accept") + ): + operation_key = "accept" + elif ( + len( + [ + x + for x in schema.paths.keys() + if x.startswith(path + "/{") + ] + ) + > 0 + ): + # if we are at i.e. /v2/servers and there is + # /v2/servers/{ most likely we are at the collection + # level + if method == "get": + operation_key = "list" + elif method == "head": + operation_key = "check" + elif method == "patch": + if ( + "application/json" + in operation.requestBody.get("content", {}) + ): + operation_key = "update" + else: + operation_key = "patch" + elif method == "post": + operation_key = "create" + elif method == "put": + operation_key = "replace" + elif method == "delete": + operation_key = "delete_all" + elif method == "head": + operation_key = "check" + elif method == "get": + operation_key = "get" + elif method == "post": + operation_key = "create" + elif method == "put": + operation_key = path.split("/")[-1] + elif method == "patch": + if "application/json" in operation.requestBody.get( + "content", {} + ): + operation_key = "update" + else: + operation_key = "patch" + elif method == "delete": + operation_key = "delete" + if not operation_key: + logging.warn( + f"Cannot identify op name for {path}:{method}" + ) + + # Next hacks + if args.service_type == "identity" and resource_name in [ + "OS_FEDERATION/identity_provider", + "OS_FEDERATION/identity_provider/protocol", + "OS_FEDERATION/mapping", + "OS_FEDERATION/service_provider", + ]: + if method == "put": + operation_key = "create" + elif method == "patch": + operation_key = "update" + + if operation_key in resource_model: + raise RuntimeError("Operation name conflict") + else: + if ( + operation_key == "action" + and args.service_type + in [ + "compute", + "block-storage", + ] + ): + # For action we actually have multiple independent operations + try: + 
body_schema = operation.requestBody["content"][ + "application/json" + ]["schema"] + bodies = body_schema.get( + "oneOf", [body_schema] + ) + if len(bodies) > 1: + discriminator = body_schema.get( + "x-openstack", {} + ).get("discriminator") + if discriminator != "action": + raise RuntimeError( + "Cannot generate metadata for %s since request body is not having action discriminator" + % path + ) + for body in bodies: + action_name = body.get( + "x-openstack", {} + ).get("action-name") + if not action_name: + action_name = list( + body["properties"].keys() + )[0] + # Hardcode fixes + if ( + resource_name == "flavor" + and action_name + in [ + "update", + "create", + "delete", + ] + ): + # Flavor update/create/delete + # operations are exposed ALSO as wsgi + # actions. This is wrong and useless. + logging.warn( + "Skipping generating %s:%s action", + resource_name, + action_name, + ) + continue + + operation_name = "-".join( + x.lower() + for x in re.split( + common.SPLIT_NAME_RE, action_name + ) + ).lower() + rust_sdk_params = ( + get_rust_sdk_operation_args( + "action", + operation_name=action_name, + module_name=get_module_name( + action_name + ), + ) + ) + rust_cli_params = ( + get_rust_cli_operation_args( + "action", + operation_name=action_name, + module_name=get_module_name( + action_name + ), + ) + ) + + op_model = OperationModel( + operation_id=operation.operationId, + targets=dict(), + ) + op_model.operation_type = "action" + + op_model.targets["rust-sdk"] = ( + rust_sdk_params + ) + op_model.targets["rust-cli"] = ( + rust_cli_params + ) + + op_model = post_process_operation( + args.service_type, + resource_name, + operation_name, + op_model, + ) + + resource_model.operations[ + operation_name + ] = op_model + + except KeyError: + raise RuntimeError( + "Cannot get bodies for %s" % path + ) + else: + if not operation_key: + raise NotImplementedError + operation_type = get_operation_type_by_key( + operation_key + ) + op_model.operation_type = operation_type + 
# NOTE: sdk gets operation_key and not operation_type + rust_sdk_params = get_rust_sdk_operation_args( + operation_key + ) + rust_cli_params = get_rust_cli_operation_args( + operation_key + ) + + op_model.targets["rust-sdk"] = rust_sdk_params + if rust_cli_params and not ( + args.service_type == "identity" + and operation_key == "check" + ): + op_model.targets["rust-cli"] = rust_cli_params + + op_model = post_process_operation( + args.service_type, + resource_name, + operation_key, + op_model, + ) + + resource_model.operations[operation_key] = op_model + pass + for res_name, res_data in metadata.resources.items(): + # Sanitize produced metadata + list_op = res_data.operations.get("list") + list_detailed_op = res_data.operations.get("list_detailed") + if list_op and list_detailed_op: + # There are both plain list and list with details operation. + # For the certain generator backend it makes no sense to have + # then both so we should disable generation of certain backends + # for the non detailed endpoint + list_op.targets.pop("rust-cli") + + # Prepare `find` operation data + if (list_op or list_detailed_op) and res_data.operations.get( + "show" + ): + show_op = res_data.operations["show"] + + (path, _, spec) = common.find_openapi_operation( + openapi_spec, show_op.operation_id + ) + mod_path = common.get_rust_sdk_mod_path( + args.service_type, + res_data.api_version or "", + path, + ) + response_schema = None + for code, rspec in spec.get("responses", {}).items(): + if not code.startswith("2"): + continue + content = rspec.get("content", {}) + if "application/json" in content: + try: + ( + response_schema, + _, + ) = common.find_resource_schema( + content["application/json"].get("schema", {}), + None, + ) + except Exception as ex: + logging.exception( + "Cannot process response of %s operation: %s", + show_op.operation_id, + ex, + ) + + if not response_schema: + # Show does not have a suitable + # response. 
We can't have find + # for such + continue + if "id" not in response_schema.get("properties", {}).keys(): + # Resource has no ID in show method => find impossible + continue + elif ( + "name" not in response_schema.get("properties", {}).keys() + and res_name != "floatingip" + ): + # Resource has no NAME => find useless + continue + + list_op_ = list_detailed_op or list_op + if not list_op_: + continue + (_, _, list_spec) = common.find_openapi_operation( + openapi_spec, list_op_.operation_id + ) + name_field: str = "name" + for fqan, alias in common.FQAN_ALIAS_MAP.items(): + if fqan.startswith(res_name) and alias == "name": + name_field = fqan.split(".")[-1] + name_filter_supported: bool = False + if name_field in [ + x.get("name") + for x in list(list_spec.get("parameters", [])) + ]: + name_filter_supported = True + + sdk_params = OperationTargetParams( + module_name="find", + name_field=name_field, + name_filter_supported=name_filter_supported, + sdk_mod_path="::".join(mod_path), + list_mod="list_detailed" if list_detailed_op else "list", + ) + res_data.operations["find"] = OperationModel( + operation_id=list_op_.operation_id, + operation_type="find", + targets={"rust-sdk": sdk_params}, + ) + + # Let other operations know of `find` presence + for op_name, op_data in res_data.operations.items(): + if op_name not in ["find", "list", "create"]: + for ( + target_name, + target_params, + ) in op_data.targets.items(): + if target_name in ["rust-cli"]: + target_params.find_implemented_by_sdk = True + + yaml = YAML() + yaml.preserve_quotes = True + yaml.default_flow_style = False + yaml.indent(mapping=2, sequence=4, offset=2) + metadata_path.parent.mkdir(exist_ok=True, parents=True) + with open(metadata_path, "w") as fp: + yaml.dump( + metadata.model_dump( + exclude_none=True, exclude_defaults=True, by_alias=True + ), + fp, + ) + + +def get_operation_type_by_key(operation_key): + if operation_key in ["list", "list_detailed"]: + return "list" + elif operation_key == "get": 
+ return "get" + elif operation_key == "check": + return "get" + elif operation_key == "show": + return "show" + elif operation_key in ["update", "replace"]: + return "set" + elif operation_key in ["delete", "delete_all"]: + return "delete" + elif operation_key in ["create"]: + return "create" + elif operation_key == "patch": + return "set" + elif operation_key == "default": + return "get" + elif operation_key == "download": + return "download" + elif operation_key == "upload": + return "upload" + else: + return "action" + + +def get_rust_sdk_operation_args( + operation_key: str, + operation_name: str | None = None, + module_name: str | None = None, +): + """Construct proper Rust SDK parameters for operation by type""" + sdk_params = OperationTargetParams() + sdk_params.module_name = module_name + if operation_key == "show": + sdk_params.module_name = "get" + elif operation_key == "list_detailed": + sdk_params.module_name = "list_detailed" + # elif operation_key == "action" and not module_name: + # sdk_params.module_name = operation_name if operation_name else operation_key + else: + sdk_params.module_name = module_name or get_module_name( + # get_operation_type_by_key(operation_key) + operation_key + ) + sdk_params.operation_name = operation_name + + return sdk_params + + +def get_rust_cli_operation_args( + operation_key: str, + operation_name: str | None = None, + module_name: str | None = None, +): + """Construct proper Rust CLI parameters for operation by type""" + # Get SDK params to connect things with each other + # operation_type = get_operation_type_by_key(operation_key) + sdk_params = get_rust_sdk_operation_args( + operation_key, operation_name=operation_name, module_name=module_name + ) + cli_params = OperationTargetParams() + cli_params.sdk_mod_name = sdk_params.module_name + cli_params.module_name = module_name or get_module_name(operation_key) + cli_params.operation_name = operation_name + + return cli_params + + +def get_module_name(name): + if name 
in ["list", "list_detailed"]: + return "list" + elif name == "get": + return "get" + elif name == "show": + return "show" + elif name == "check": + return "head" + elif name == "update": + return "set" + elif name == "replace": + return "replace" + elif name == "delete": + return "delete" + elif name == "delete_all": + return "delete_all" + elif name in ["create"]: + return "create" + elif name in ["default"]: + return "default" + return "_".join(x.lower() for x in re.split(common.SPLIT_NAME_RE, name)) + + +def post_process_operation( + service_type: str, resource_name: str, operation_name: str, operation +): + if service_type == "compute": + operation = post_process_compute_operation( + resource_name, operation_name, operation + ) + elif service_type == "identity": + operation = post_process_identity_operation( + resource_name, operation_name, operation + ) + elif service_type == "image": + operation = post_process_image_operation( + resource_name, operation_name, operation + ) + elif service_type in ["block-storage", "volume"]: + operation = post_process_block_storage_operation( + resource_name, operation_name, operation + ) + return operation + + +def post_process_compute_operation( + resource_name: str, operation_name: str, operation +): + if resource_name == "aggregate": + if operation_name in ["set-metadata", "add-host", "remove-host"]: + operation.targets["rust-sdk"].response_key = "aggregate" + operation.targets["rust-cli"].response_key = "aggregate" + elif resource_name == "availability_zone": + if operation_name in ["get", "list_detailed"]: + operation.targets["rust-sdk"].response_key = "availabilityZoneInfo" + operation.targets["rust-cli"].response_key = "availabilityZoneInfo" + elif resource_name == "keypair": + if operation_name == "list": + operation.targets["rust-sdk"].response_list_item_key = "keypair" + elif resource_name == "server/instance_action": + if operation_name == "list": + operation.targets["rust-sdk"].response_key = "instanceActions" + 
operation.targets["rust-cli"].response_key = "instanceActions" + else: + operation.targets["rust-sdk"].response_key = "instanceAction" + operation.targets["rust-cli"].response_key = "instanceAction" + elif resource_name == "server/topology": + if operation_name == "list": + operation.targets["rust-sdk"].response_key = "nodes" + operation.targets["rust-cli"].response_key = "nodes" + elif resource_name == "server/volume_attachment": + if operation_name == "list": + operation.targets["rust-sdk"].response_key = "volumeAttachments" + operation.targets["rust-cli"].response_key = "volumeAttachments" + elif operation_name in ["create", "show", "update"]: + operation.targets["rust-sdk"].response_key = "volumeAttachment" + operation.targets["rust-cli"].response_key = "volumeAttachment" + + return operation + + +def post_process_identity_operation( + resource_name: str, operation_name: str, operation +): + if resource_name == "role/imply": + if operation_name == "list": + operation.targets["rust-cli"].response_key = "role_inference" + operation.targets["rust-sdk"].response_key = "role_inference" + if resource_name == "role_inference": + if operation_name == "list": + operation.targets["rust-cli"].response_key = "role_inferences" + operation.targets["rust-sdk"].response_key = "role_inferences" + return operation + + +def post_process_image_operation( + resource_name: str, operation_name: str, operation +): + if resource_name.startswith("schema"): + # Image schemas are a JSON operation + operation.targets["rust-cli"].operation_type = "json" + + return operation + + +def post_process_block_storage_operation( + resource_name: str, operation_name: str, operation +): + if resource_name == "type": + if operation_name == "list": + operation.targets["rust-cli"].response_key = "volume_types" + operation.targets["rust-sdk"].response_key = "volume_types" + elif operation_name in ["create", "show", "update"]: + operation.targets["rust-cli"].response_key = "volume_type" + 
operation.targets["rust-sdk"].response_key = "volume_type" + elif resource_name == "type/volume_type_access": + operation.targets["rust-cli"].response_key = "volume_type_access" + operation.targets["rust-sdk"].response_key = "volume_type_access" + + return operation diff --git a/codegenerator/model.py b/codegenerator/model.py new file mode 100644 index 0000000..96ec554 --- /dev/null +++ b/codegenerator/model.py @@ -0,0 +1,666 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import copy +import hashlib +import json +import logging +from typing import Any +from typing import Type +import typing as ty + +from pydantic import BaseModel + +from codegenerator import common + + +def dicthash_(data: dict[str, Any]) -> str: + """Calculate hash of the dictionary""" + dh = hashlib.md5() + encoded = json.dumps(data, sort_keys=True).encode() + dh.update(encoded) + return dh.hexdigest() + + +class Reference(BaseModel): + """Reference of the complex type to the occurence instance""" + + #: Name of the object that uses the type under reference + name: str + type: Type | None = None + hash_: str | None = None + + def __hash__(self): + return hash((self.name, self.type, self.hash_)) + + +class PrimitiveType(BaseModel): + """Primitive Data Type stricture""" + + pass + + +class PrimitiveString(PrimitiveType): + pass + + +class ConstraintString(PrimitiveType): + format: str | None = None + minLength: int | None = None + maxLength: int | None = None + pattern: str | None = None + enum: 
list[Any] | None = None + + +class PrimitiveNumber(PrimitiveType): + pass + + +class ConstraintNumber(PrimitiveNumber): + format: str | None = None + minimum: int | None = None + maximum: int | float | None = None + exclusiveMaximum: bool | None = None + multipleOf: int | float | None = None + + +class ConstraintInteger(ConstraintNumber): + pass + + +class PrimitiveBoolean(PrimitiveType): + pass + + +class PrimitiveNull(PrimitiveType): + pass + + +class PrimitiveAny(PrimitiveType): + pass + + +class ADT(BaseModel): + """Abstract Data Type / Composite - typically sort of + collection of Primitives""" + + reference: Reference | None = None + description: str | None = None + + +class AbstractList(ADT): + """Abstract list""" + + item_type: PrimitiveType | ADT | Reference + + +class AbstractCollection(ADT): + """AllOf/OneOf/etc""" + + pass + + +class AbstractContainer(ADT): + """Struct/Object""" + + pass + + +class OneOfType(ADT): + """OneOf - a collection of data types where only one of the kinds can be used (a.k.a. 
enum)""" + + kinds: list[PrimitiveType | ADT | Reference] = [] + + +class Enum(AbstractCollection): + """Enum: a unique collection of primitives""" + + base_types: list[Type[PrimitiveType]] = [] + literals: set[Any] = set() + + +class StructField(BaseModel): + """Structure field: type + additional info""" + + data_type: PrimitiveType | ADT | Reference + description: str | None = None + is_required: bool = False + min_ver: str | None = None + max_ver: str | None = None + + +class Struct(ADT): + """Struct/Object""" + + fields: dict[str, StructField] = {} + additional_fields: PrimitiveType | ADT | None = None + pattern_properties: dict[str, PrimitiveType | ADT] | None = None + + +class Dictionary(ADT): + """Simple dictionary with values of a single type""" + + value_type: PrimitiveType | ADT + + +class Array(AbstractList): + """A pure list""" + + pass + + +class CommaSeparatedList(AbstractList): + """A list that is serialized comma separated""" + + pass + + +class Set(AbstractList): + """A set of unique items""" + + pass + + +class JsonSchemaParser: + """JsonSchema to internal DataModel converter""" + + def parse( + self, schema, ignore_read_only: bool = False + ) -> ty.Tuple[ADT | None, list[ADT]]: + """Parse JsonSchema object into internal DataModel""" + results: list[ADT] = [] + res = self.parse_schema( + schema, results, ignore_read_only=ignore_read_only + ) + return (res, results) + + def parse_schema( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + min_ver: str | None = None, + max_ver: str | None = None, + ignore_read_only: bool | None = False, + ) -> PrimitiveType | ADT: + type_ = schema.get("type") + if "oneOf" in schema: + return self.parse_oneOf( + schema, + results, + name=name, + parent_name=parent_name, + ignore_read_only=ignore_read_only, + ) + elif "enum" in schema: + return self.parse_enum( + schema, + results, + name=name, + parent_name=parent_name, + ignore_read_only=ignore_read_only, + ) + elif 
"allOf" in schema: + return self.parse_allOf( + schema, + results, + name=name, + parent_name=parent_name, + ignore_read_only=ignore_read_only, + ) + elif isinstance(type_, list): + return self.parse_typelist( + schema, + results, + name=name, + parent_name=parent_name, + ignore_read_only=ignore_read_only, + ) + elif isinstance(type_, str): + if type_ == "object": + return self.parse_object( + schema, + results, + name=name, + parent_name=parent_name, + min_ver=min_ver, + max_ver=max_ver, + ignore_read_only=ignore_read_only, + ) + elif type_ == "array": + return self.parse_array( + schema, + results, + name=name, + parent_name=parent_name, + ignore_read_only=ignore_read_only, + ) + elif type_ == "string": + obj = ConstraintString(**schema) + # todo: set obj props + return obj + elif type_ == "integer": + obj = ConstraintInteger(**schema) + # todo: set obj props + return obj + elif type_ == "number": + obj = ConstraintNumber(**schema) + # todo: set obj props + return obj + elif type_ == "boolean": + obj = PrimitiveBoolean() + # todo: set obj props + return obj + elif type_ == "null": + obj = PrimitiveNull() + return obj + elif not type_ and "properties" in schema: + # Sometimes services forget to set "type=object" + return self.parse_object( + schema, + results, + name=name, + parent_name=parent_name, + min_ver=min_ver, + max_ver=max_ver, + ignore_read_only=ignore_read_only, + ) + elif schema == {}: + return PrimitiveNull() + elif not type_ and "format" in schema: + return ConstraintString(**schema) + raise RuntimeError("Cannot determine type for %s", schema) + + def parse_object( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + min_ver: str | None = None, + max_ver: str | None = None, + ignore_read_only: bool | None = False, + ): + obj: ADT | None = None + properties = schema.get("properties") + additional_properties = schema.get("additionalProperties") + additional_properties_type: PrimitiveType | ADT | None = 
None + pattern_properties = schema.get("patternProperties") + pattern_props: dict[str, PrimitiveType | ADT] | None = {} + required = schema.get("required", []) + os_ext: dict = schema.get("x-openstack", {}) + min_ver = os_ext.get("min-ver", min_ver) + max_ver = os_ext.get("max-ver", max_ver) + if properties: + obj = Struct() + for k, v in properties.items(): + if k == "additionalProperties" and isinstance(v, bool): + # Some schemas (in keystone) are Broken + continue + if ignore_read_only and v.get("readOnly", False): + continue + data_type = self.parse_schema( + v, + results, + name=k, + parent_name=name, + min_ver=min_ver, + max_ver=max_ver, + ignore_read_only=ignore_read_only, + ) + ref = getattr(data_type, "reference", None) + if ref: + field = StructField(data_type=ref) + else: + field = StructField( + data_type=data_type, + ) + + field.description = v.get("description") + if k in required: + field.is_required = True + if min_ver: + field.min_ver = min_ver + if max_ver: + field.max_ver = max_ver + obj.fields[k] = field + if additional_properties: + if ( + isinstance(additional_properties, dict) + and "type" in additional_properties + ): + additional_properties_type = self.parse_schema( + additional_properties, + results, + name=name, + min_ver=min_ver, + max_ver=max_ver, + ignore_read_only=ignore_read_only, + ) + else: + additional_properties_type = PrimitiveAny() + + if pattern_properties: + for key_pattern, value_type in pattern_properties.items(): + type_kind: PrimitiveType | ADT = self.parse_schema( + value_type, + results, + name=name, + min_ver=min_ver, + max_ver=max_ver, + ignore_read_only=ignore_read_only, + ) + pattern_props[key_pattern] = type_kind # type: ignore + + if obj: + if additional_properties_type: + obj.additional_fields = additional_properties_type + if pattern_props: + obj.pattern_properties = copy.deepcopy(pattern_props) + else: + if pattern_props and not additional_properties_type: + if len(list(pattern_props.values())) == 1: + obj = 
Dictionary( + value_type=list(pattern_props.values())[0] + ) + else: + obj = Struct(pattern_properties=pattern_props) + elif not pattern_props and additional_properties_type: + obj = Dictionary(value_type=additional_properties_type) + else: + obj = Dictionary(value_type=PrimitiveAny()) + if not obj: + raise RuntimeError("Object %s is not supported", schema) + + if name: + obj.reference = Reference( + name=name, type=obj.__class__, hash_=dicthash_(schema) + ) + + if obj: + obj.description = schema.get("description") + if ( + obj.reference + and f"{obj.reference.name}{obj.reference.type}" + in [ + f"{x.reference.name}{x.reference.type}" + for x in results + if x.reference + ] + ): + # Structure with the same name is already present. Prefix the + # new one with the parent name + if parent_name and name: + new_name = parent_name + "_" + name + + if Reference(name=new_name, type=obj.reference.type) in [ + x.reference for x in results + ]: + raise NotImplementedError + else: + obj.reference.name = new_name + results.append(obj) + return obj + + def parse_oneOf( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + ignore_read_only: bool | None = False, + ): + obj = OneOfType() + for kind in schema.get("oneOf"): + kind_schema = common._deep_merge(schema, kind) + kind_schema.pop("oneOf") + # todo: merge base props into the kind + kind_type = self.parse_schema( + kind_schema, + results, + name=name, + ignore_read_only=ignore_read_only, + ) + if not kind_type: + raise NotImplementedError + ref: Reference | None = getattr(kind_type, "reference", None) + if ref: + obj.kinds.append(ref) + else: + obj.kinds.append(kind_type) + if name: + obj.reference = Reference( + name=name, type=obj.__class__, hash_=dicthash_(schema) + ) + results.append(obj) + return obj + + def parse_typelist( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + ignore_read_only: bool | None = False, + ): + if 
len(schema.get("type")) == 1: + # Bad schema with type being a list of 1 entry + schema["type"] = schema["type"][0] + obj = self.parse_schema( + schema, + results, + name=name, + ignore_read_only=ignore_read_only, + ) + return obj + + obj = OneOfType() + + for kind_type in schema.get("type"): + kind_schema = copy.deepcopy(schema) + kind_schema["type"] = kind_type + kind_type = self.parse_schema( + kind_schema, + results, + name=name, + ignore_read_only=ignore_read_only, + ) + ref = getattr(kind_type, "reference", None) + if ref: + obj.kinds.append(ref) + else: + obj.kinds.append(kind_type) + if name: + obj.reference = Reference( + name=name, type=obj.__class__, hash_=dicthash_(schema) + ) + results.append(obj) + return obj + + def parse_array( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + ignore_read_only: bool | None = False, + ): + # todo: decide whether some constraints can be under items + item_type = self.parse_schema( + schema.get("items", {"type": "string"}), + results, + name=name, + ignore_read_only=ignore_read_only, + ) + ref = getattr(item_type, "reference", None) + if ref: + obj = Array(item_type=ref) + else: + obj = Array(item_type=item_type) + if name: + obj.reference = Reference( + name=name, type=obj.__class__, hash_=dicthash_(schema) + ) + results.append(obj) + return obj + + def parse_enum( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + ignore_read_only: bool | None = False, + ): + # todo: decide whether some constraints can be under items + literals = schema.get("enum") + obj = Enum(literals=literals, base_types=[]) + literal_types = set([type(x) for x in literals]) + for literal_type in literal_types: + if literal_type is str: + obj.base_types.append(ConstraintString) + elif literal_type is int: + obj.base_types.append(ConstraintInteger) + elif literal_type is bool: + obj.base_types.append(PrimitiveBoolean) + + if name: + 
obj.reference = Reference( + name=name, type=obj.__class__, hash_=dicthash_(schema) + ) + results.append(obj) + return obj + + def parse_allOf( + self, + schema, + results: list[ADT], + name: str | None = None, + parent_name: str | None = None, + ignore_read_only: bool | None = False, + ): + sch = copy.deepcopy(schema) + sch.pop("allOf") + for kind in schema.get("allOf"): + sch = common._deep_merge(sch, kind) + obj = self.parse_schema( + sch, results, name=name, ignore_read_only=ignore_read_only + ) + if not obj: + raise NotImplementedError + # if name: + # obj.reference = Reference(name=name, type=obj.__class__) + # results.append(obj) + return obj + + +class RequestParameter(BaseModel): + """OpenAPI Request parameter DataType wrapper""" + + name: str + location: str + data_type: PrimitiveType | ADT + description: str | None = None + is_required: bool = False + is_flag: bool = False + + +class OpenAPISchemaParser(JsonSchemaParser): + """OpenAPI to internal DataModel converter""" + + def parse_parameter(self, schema) -> RequestParameter: + """Parse OpenAPI request parameter into internal DataModel""" + param_name = schema.get("name") + param_location = schema.get("in") + param_schema = schema.get("schema") + param_typ = param_schema.get("type") + dt: PrimitiveType | ADT | None = None + if isinstance(param_typ, list) and "null" in param_typ: + param_typ.remove("null") + if len(param_typ) == 1: + param_typ = param_typ[0] + if param_typ == "string": + # NOTE: this is commented out so far since most of enums are just + # too wrong to treat them as enums here + # if "enum" in param_schema: + # dt = Enum(literals=param_schema["enum"], base_types=[ConstraintString]) + # else: + dt = ConstraintString(**param_schema) + elif param_typ == "number": + dt = ConstraintNumber(**param_schema) + elif param_typ == "integer": + dt = ConstraintInteger(**param_schema) + elif param_typ == "boolean": + dt = PrimitiveBoolean(**param_schema) + elif param_typ == "null": + dt = 
PrimitiveNull(**param_schema) + elif param_typ == "array": + try: + items_type = param_schema.get("items").get("type") + except Exception: + logging.exception("Broken array data: %s", param_schema) + raise + style = schema.get("style", "form") + explode = schema.get("explode", True) + if items_type == "string": + if style == "form" and not explode: + dt = CommaSeparatedList(item_type=ConstraintString()) + elif style == "form" and explode: + dt = Set(item_type=ConstraintString()) + else: + raise NotImplementedError( + "Parameter serialization %s not supported" % schema + ) + + elif isinstance(param_typ, list): + # Param type can be anything. Process supported combinations first + if param_location == "query" and param_name == "limit": + dt = ConstraintInteger(minimum=0) + elif param_location == "query" and sorted( + ["string", "boolean"] + ) == sorted(param_typ): + dt = PrimitiveBoolean() + elif param_location == "query" and sorted( + ["string", "integer"] + ) == sorted(param_typ): + dt = ConstraintInteger(**param_schema) + elif param_location == "query" and sorted( + ["string", "number"] + ) == sorted(param_typ): + dt = ConstraintNumber(**param_schema) + + if isinstance(dt, ADT): + # Set reference into the data_type so that it doesn't mess with main body types + dt.reference = Reference( + name=param_name, type=RequestParameter, hash_=dicthash_(schema) + ) + + is_flag: bool = False + os_ext = schema.get("x-openstack", {}) + if not isinstance(os_ext, dict): + raise RuntimeError(f"x-openstack must be a dictionary in {schema}") + if "is-flag" in os_ext: + is_flag = os_ext["is-flag"] + + if dt: + return RequestParameter( + name=param_name, + location=param_location, + data_type=dt, + description=schema.get("description"), + is_required=schema.get("required", False), + is_flag=is_flag, + ) + raise NotImplementedError("Parameter %s is not covered yet" % schema) + + raise RuntimeError("Parameter %s is not supported yet" % schema) diff --git a/codegenerator/openapi/base.py 
b/codegenerator/openapi/base.py new file mode 100644 index 0000000..4bfc627 --- /dev/null +++ b/codegenerator/openapi/base.py @@ -0,0 +1,1237 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import abc +import copy +import datetime +import importlib +import inspect +import logging +from pathlib import Path +from typing import Any +import re + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import PathSchema +from codegenerator.common.schema import SpecSchema +from codegenerator.common.schema import TypeSchema +from openapi_core import Spec +from ruamel.yaml.scalarstring import LiteralScalarString +from ruamel.yaml import YAML +from wsme import types as wtypes + + +VERSION_RE = re.compile(r"[Vv][0-9.]*") + + +def get_referred_type_data(func, name: str): + """Get python type object referred by the function + + Return `some.object` for a function like: + + @wsgi.validation(some.object) + def foo(): + pass + + :param func: Function + :param str name: object name + """ + module = inspect.getmodule(func) + if module: + (mod, obj) = (None, None) + if "." in name: + (mod, obj) = name.split(".") + else: + raise RuntimeError('No "." 
in %s', name) + m = importlib.import_module(module.__name__) + if hasattr(m, mod): + mod = getattr(m, mod) + else: + raise RuntimeError("Cannot find attr %s", name) + if hasattr(mod, obj): + return getattr(mod, obj) + else: + raise RuntimeError("Cannot find definition for %s", name) + else: + raise RuntimeError("Cannot get module the function was defined in") + + +class OpenStackServerSourceBase: + # A URL to Operation tag (OpenApi group) mapping. Can be used when first + # non parameter path element grouping is not enough + # ("/qos/policies/{policy_id}/packet_rate_limit_rules" should be + # "qos-packet-rate-limit-rules" instead of "qos") + URL_TAG_MAP: dict[str, str] = {} + + def _api_ver_major(self, ver): + return ver.ver_major + + def _api_ver_minor(self, ver): + return ver.ver_minor + + def _api_ver(self, ver): + return (ver.ver_major, ver.ver_minor) + + def useFixture(self, fixture): + try: + fixture.setUp() + except Exception as ex: + logging.exception("Got exception", ex) + else: + return fixture + + @abc.abstractmethod + def generate(self, target_dir, args) -> Path: + pass + + def load_openapi(self, path): + """Load existing OpenAPI spec from the file""" + if not path.exists(): + return + yaml = YAML(typ="safe") + yaml.preserve_quotes = True + with open(path, "r") as fp: + spec = yaml.load(fp) + + return SpecSchema(**spec) + + def dump_openapi(self, spec, path, validate=False): + """Dump OpenAPI spec into the file""" + if validate: + self.validate_spec(spec) + yaml = YAML() + yaml.preserve_quotes = True + yaml.indent(mapping=2, sequence=4, offset=2) + with open(path, "w") as fp: + yaml.dump( + spec.model_dump( + exclude_none=True, exclude_defaults=True, by_alias=True + ), + fp, + ) + + def validate_spec(self, openapi_spec): + Spec.from_dict( + openapi_spec.model_dump( + exclude_none=True, exclude_defaults=True, by_alias=True + ) + ) + + def _sanitize_param_ver_info(self, openapi_spec, min_api_version): + # Remove min_version of params if it matches to 
min_api_version + for k, v in openapi_spec.components.parameters.items(): + os_ext = v.openstack + if os_ext: + if os_ext.get("min-ver") == min_api_version: + v.openstack.pop("min-ver") + if "max_ver" in os_ext and os_ext["max-ver"] is None: + v.openstack.pop("max-ver") + if os_ext == {}: + v.openstack = None + + def _process_route( + self, route, openapi_spec, ver_prefix=None, framework=None + ): + # Placement exposes "action" as controller in route defaults, all others - "controller" + if not ("controller" in route.defaults or "action" in route.defaults): + return + if "action" in route.defaults and "_methods" in route.defaults: + # placement 405 handler + return + # Path can be "/servers/{id}", but can be + # "/volumes/:volume_id/types/:(id)" - process + # according to the routes lib logic + path = ver_prefix if ver_prefix else "" + operation_spec = None + for part in route.routelist: + if isinstance(part, dict): + path += "{" + part["name"] + "}" + else: + path += part + + if path == "": + # placement has "" path - see weird explanation in the placement source code + return + + # if "method" not in route.conditions: + # raise RuntimeError("Method not set for %s", route) + method = ( + route.conditions.get("method", "GET")[0] + if route.conditions + else "GET" + ) + + controller = route.defaults.get("controller") + action = route.defaults.get("action") + logging.info( + "Path: %s; method: %s; operation: %s", path, method, action + ) + + versioned_methods = {} + controller_actions = {} + framework = None + if hasattr(controller, "controller"): + # wsgi + framework = "wsgi" + contr = controller.controller + if hasattr(contr, "versioned_methods"): + versioned_methods = contr.versioned_methods + if hasattr(contr, "wsgi_actions"): + controller_actions = contr.wsgi_actions + if hasattr(controller, "wsgi_actions"): + # Nova flavors mess with wsgi_action instead of normal operation + # and actions on the wrong controller + parent_controller_actions = 
controller.wsgi_actions + if parent_controller_actions: + controller_actions.update(parent_controller_actions) + elif hasattr(controller, "_pecan") or framework == "pecan": + # Pecan base app + framework = "pecan" + contr = controller + elif not controller and action and hasattr(action, "func"): + # Placement base app + framework = "placement" + controller = action + contr = action + action = None + else: + raise RuntimeError("Unsupported controller %s" % controller) + # logging.debug("Actions: %s, Versioned methods: %s", actions, versioned_methods) + + # path_spec = openapi_spec.paths.setdefault(path, PathSchema()) + + # operation_spec = dict() #= getattr(path_spec, method.lower()) # , {}) + # Get Path elements + path_elements: list[str] = list(filter(None, path.split("/"))) + if path_elements and VERSION_RE.match(path_elements[0]): + path_elements.pop(0) + operation_tags = self._get_tags_for_url(path) + + # Build path parameters (/foo/{foo_id}/bar/{id} => $foo_id, $foo_bar_id) + # Since for same path we are here multiple times check presence of + # parameter before adding new params + path_params: list[ParameterSchema] = [] + path_resource_names: list[str] = [] + for path_element in path_elements: + if "{" in path_element: + param_name = path_element.strip("{}") + global_param_name = ( + "_".join(path_resource_names) + f"_{param_name}" + ) + + param_ref_name = self._get_param_ref( + openapi_spec, + global_param_name, + param_name, + param_location="path", + path=path, + ) + # Ensure reference to the param is in the path_params + if param_ref_name not in [ + k.ref for k in [p for p in path_params] + ]: + path_params.append(ParameterSchema(ref=param_ref_name)) + else: + path_resource_names.append(path_element.replace("-", "_")) + # Cleanup path_resource_names + # if len(path_resource_names) > 0 and VERSION_RE.match(path_resource_names[0]): + # # We should not have version prefix in the path_resource_names + # path_resource_names.pop(0) + if len(path_resource_names) 
== 0: + path_resource_names.append("root") + elif path_elements[-1].startswith("{"): + rn = path_resource_names[-1] + if rn.endswith("ies"): + rn = rn.replace("ies", "y") + if rn.endswith("sses"): + rn = rn[:-2] + else: + rn = rn.rstrip("s") + path_resource_names[-1] = rn + + # Set operationId + operation_id = re.sub( + r"^(/?v[0-9.]*/)", + "", + "/".join([x.strip("{}") for x in path_elements]) + + f":{method.lower()}", # noqa + ) + + if action in versioned_methods: + # Normal REST operation with version bounds + (start_version, end_version) = (None, None) + + # if len(versioned_methods[action]) > 1: + # for m in versioned_methods[action]: + # raise RuntimeError("Multiple versioned methods for action %s:%s: %s", path, action, versioned_methods[action]) + for versioned_method in sorted( + versioned_methods[action], key=lambda v: v.start_version + ): + start_version = versioned_method.start_version + end_version = versioned_method.end_version + func = versioned_method.func + + # Get the path/op spec only when we have + # something to fill in + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + method=method, + operation_name=action, + start_version=start_version, + end_version=end_version, + ) + elif action and hasattr(contr, action): + # Normal REST operation without version bounds + func = getattr(contr, action) + + # Get the path/op spec only when we have + # something to fill in + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + 
operation_spec.operationId = operation_id + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + operation_name=action, + method=method, + path=path, + ) + elif action != "action" and action in controller_actions: + # Normal REST operation without version bounds and present in + # wsgi_actions of child or parent controller. Example is + # compute.flavor.create/update which are exposed as wsgi actions + # (BUG) + func = controller_actions[action] + + # Get the path/op spec only when we have + # something to fill in + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + operation_name=action, + method=method, + path=path, + ) + + elif ( + controller_actions and action == "action" + ): # and action in controller_actions: + # There are ACTIONS present on the controller + for action, op_name in controller_actions.items(): + logging.info("Action %s: %s", action, op_name) + (start_version, end_version) = (None, None) + if isinstance(op_name, str): + # wsgi action value is a string + if op_name in versioned_methods: + # ACTION with version bounds + if len(versioned_methods[op_name]) > 1: + raise RuntimeError( + "Multiple versioned methods for action %s", + action, + ) + for ver_method in versioned_methods[op_name]: + start_version = ver_method.start_version + end_version = ver_method.end_version + func = ver_method.func + logging.info("Versioned action %s", func) + # operation_id += f"[{op_name}]" + elif hasattr(contr, 
op_name): + # ACTION with no version bounds + func = getattr(contr, op_name) + # operation_id += f"[{op_name}]" + logging.info("Unversioned action %s", func) + else: + logging.error( + "Cannot find code for %s:%s:%s [%s]", + path, + method, + action, + dir(contr), + ) + continue + elif callable(op_name): + # Action is already a function (compute.flavors) + closurevars = inspect.getclosurevars(op_name) + # Versioned actions in nova can be themelves as a + # version_select wrapped callable (i.e. baremetal.action) + key = closurevars.nonlocals.get("key", None) + slf = closurevars.nonlocals.get("self", None) + + if key and key in versioned_methods: + # ACTION with version bounds + if len(versioned_methods[key]) > 1: + raise RuntimeError( + "Multiple versioned methods for action %s", + action, + ) + for ver_method in versioned_methods[key]: + start_version = ver_method.start_version + end_version = ver_method.end_version + func = ver_method.func + logging.info("Versioned action %s", func) + elif slf and key: + vm = getattr(slf, "versioned_methods", None) + if vm and key in vm: + # ACTION with version bounds + if len(vm[key]) > 1: + raise RuntimeError( + "Multiple versioned methods for action %s", + action, + ) + for ver_method in vm[key]: + start_version = ver_method.start_version + end_version = ver_method.end_version + func = ver_method.func + logging.info("Versioned action %s", func) + else: + func = op_name + + # Get the path/op spec only when we have + # something to fill in + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + operation_name=action, + method=method, + 
start_version=start_version, + end_version=end_version, + mode="action", + path=path, + ) + elif framework == "pecan": + if callable(controller): + func = controller + # Get the path/op spec only when we have + # something to fill in + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + operation_name=action, + method=method, + path=path, + ) + + elif framework == "placement": + if callable(controller.func): + func = controller.func + # Get the path/op spec only when we have + # something to fill in + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + if operation_tags: + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + operation_name=func.__name__, + method=method, + path=path, + ) + + else: + logging.warning(controller.__dict__.items()) + logging.warning(contr.__dict__.items()) + logging.warning("No operation found") + + return operation_spec + + def process_operation( + self, + func, + openapi_spec, + operation_spec, + path_resource_names, + *, + controller=None, + operation_name=None, + method=None, + start_version=None, + end_version=None, + mode=None, + path: str | None = None, + ): + logging.info( + "%s: %s [%s]", + (mode or "operation").title(), + operation_name, + func, + ) + deser_schema = None + deser = getattr(controller, 
"deserializer", None) + if deser: + deser_schema = getattr(deser, "schema", None) + ser = getattr(controller, "serializer", None) + # deser_schema = getattr(deser, "schema", None) + ser_schema = getattr(ser, "schema", None) + if not ser_schema and hasattr(ser, "task_schema"): + # Image Task serializer is a bit different + ser_schema = getattr(ser, "task_schema") + + if mode != "action": + doc = inspect.getdoc(func) + if doc and not operation_spec.description: + operation_spec.description = LiteralScalarString(doc) + if operation_spec.description: + # Reading spec from yaml file it was converted back to regular + # string. Therefore need to force it back to Literal block. + operation_spec.description = LiteralScalarString( + operation_spec.description + ) + + action_name = None + query_params_versions = [] + body_schemas = [] + expected_errors = ["404"] + response_code = None + # Version bound on an operation are set only when it is not an + # "action" + if ( + mode != "action" + and start_version + and self._api_ver_major(start_version) != 0 + ): + if not ( + "min-ver" in operation_spec.openstack + and tuple( + [ + int(x) + for x in operation_spec.openstack["min-ver"].split(".") + ] + ) + < (self._api_ver(start_version)) + ): + operation_spec.openstack["min-ver"] = ( + start_version.get_string() + ) + + if mode != "action" and end_version: + if end_version.ver_major == 0: + operation_spec.openstack.pop("max-ver", None) + operation_spec.deprecated = None + else: + # There is some end_version. Set the deprecated flag and wait + # for final version to be processed which drop it if max_ver + # is not set + operation_spec.deprecated = True + if not ( + "max-ver" in operation_spec.openstack + and tuple( + [ + int(x) + for x in operation_spec.openstack["max-ver"].split( + "." 
+ ) + ] + ) + > self._api_ver(end_version) + ): + operation_spec.openstack["max-ver"] = ( + end_version.get_string() + ) + + action_name = getattr(func, "wsgi_action", None) + if action_name: + operation_name = action_name + + # Unwrap operation decorators to access all properties + f = func + while hasattr(f, "__wrapped__"): + closure = inspect.getclosurevars(f) + closure_locals = closure.nonlocals + min_ver = closure_locals.get("min_version", start_version) + max_ver = closure_locals.get("max_version", end_version) + + if "errors" in closure_locals: + expected_errors = closure_locals["errors"] + if isinstance(expected_errors, list): + expected_errors = [ + str(x) + for x in filter( + lambda x: isinstance(x, int), expected_errors + ) + ] + elif isinstance(expected_errors, int): + expected_errors = [str(expected_errors)] + if "request_body_schema" in closure_locals: + # Body type is known through method decorator + obj = closure_locals["request_body_schema"] + if obj.get("type") in ["object", "array"]: + # We only allow object and array bodies + # To prevent type name collision keep module name part of the name + typ_name = ( + "".join([x.title() for x in path_resource_names]) + + func.__name__.title() + + (f"_{min_ver.replace('.', '')}" if min_ver else "") + ) + comp_schema = openapi_spec.components.schemas.setdefault( + typ_name, + self._sanitize_schema( + copy.deepcopy(obj), + start_version=start_version, + end_version=end_version, + ), + ) + + if min_ver: + if not comp_schema.openstack: + comp_schema.openstack = {} + comp_schema.openstack["min-ver"] = min_ver + if max_ver: + if not comp_schema.openstack: + comp_schema.openstack = {} + comp_schema.openstack["max-ver"] = max_ver + if mode == "action": + if not comp_schema.openstack: + comp_schema.openstack = {} + comp_schema.openstack["action-name"] = action_name + + ref_name = f"#/components/schemas/{typ_name}" + body_schemas.append(ref_name) + if "query_params_schema" in closure_locals: + obj = 
closure_locals["query_params_schema"] + query_params_versions.append((obj, min_ver, max_ver)) + + f = f.__wrapped__ + + if hasattr(func, "_wsme_definition"): + fdef = getattr(func, "_wsme_definition") + body_spec = getattr(fdef, "body_type", None) + if body_spec: + body_schema = _convert_wsme_to_jsonschema(body_spec) + schema_name = body_spec.__name__ + openapi_spec.components.schemas.setdefault( + schema_name, TypeSchema(**body_schema) + ) + body_schemas.append(f"#/components/schemas/{schema_name}") + rsp_spec = getattr(fdef, "return_type", None) + if rsp_spec: + ser_schema = _convert_wsme_to_jsonschema(rsp_spec) + response_code = getattr(fdef, "status_code", None) + + if not body_schemas and deser_schema: + # Glance may have request deserializer attached schema + schema_name = ( + "".join([x.title() for x in path_resource_names]) + + func.__name__.title() + + "Request" + ) + (body_schema, mime_type) = self._get_schema_ref( + openapi_spec, + schema_name, + description=f"Request of the {operation_spec.operationId} operation", + schema_def=deser_schema, + ) + + if query_params_versions: + so = sorted( + query_params_versions, + key=lambda d: d[1].split(".") if d[1] else (0, 0), + ) + for data, min_ver, max_ver in so: + self.process_query_parameters( + openapi_spec, + operation_spec, + path_resource_names, + data, + min_ver, + max_ver, + ) + # if body_schemas or mode == "action": + if method in ["PUT", "POST", "PATCH"]: + self.process_body_parameters( + openapi_spec, + operation_spec, + path_resource_names, + body_schemas, + mode, + operation_name, + ) + + responses_spec = operation_spec.responses + for error in expected_errors: + responses_spec.setdefault(str(error), dict(description="Error")) + + if mode != "action" and str(error) == "410": + # This looks like a deprecated operation still hanging out there + operation_spec.deprecated = True + if not response_code: + response_codes = getattr(func, "wsgi_code", None) + if response_codes and not 
isinstance(response_codes, list): + response_codes = [response_codes] + else: + response_codes = [response_code] + if not response_codes: + # No expected response code known, take "normal" defaults + response_codes = self._get_response_codes( + method, operation_spec.operationId + ) + if response_codes: + for response_code in response_codes: + rsp = responses_spec.setdefault( + str(response_code), dict(description="Ok") + ) + if str(response_code) != "204" and method != "DELETE": + # Arrange response placeholder + schema_name = ( + "".join([x.title() for x in path_resource_names]) + + ( + operation_name.replace("index", "list").title() + if not path_resource_names[-1].endswith( + operation_name + ) + else "" + ) + + "Response" + ) + (schema_ref, mime_type) = self._get_schema_ref( + openapi_spec, + schema_name, + description=( + f"Response of the {operation_spec.operationId} operation" + if not action_name + else f"Response of the {operation_spec.operationId}:{action_name} action" + ), # noqa + schema_def=ser_schema, + action_name=action_name, + ) + + if schema_ref: + curr_schema = ( + rsp.get("content", {}) + .get("application/json", {}) + .get("schema", {}) + ) + if mode == "action" and curr_schema: + # There is existing response for the action. 
Need to + # merge them + if isinstance(curr_schema, dict): + curr_oneOf = curr_schema.get("oneOf") + curr_ref = curr_schema.get("$ref") + else: + curr_oneOf = curr_schema.oneOf + curr_ref = curr_schema.ref + if curr_oneOf: + if schema_ref not in [ + x["$ref"] for x in curr_oneOf + ]: + curr_oneOf.append({"$ref": schema_ref}) + elif curr_ref and curr_ref != schema_ref: + rsp["content"]["application/json"][ + "schema" + ] = TypeSchema( + oneOf=[ + {"$ref": curr_ref}, + {"$ref": schema_ref}, + ] + ) + else: + rsp["content"] = { + "application/json": { + "schema": {"$ref": schema_ref} + } + } + + # Ensure operation tags are existing + for tag in operation_spec.tags: + if tag not in [x["name"] for x in openapi_spec.tags]: + openapi_spec.tags.append({"name": tag}) + + self._post_process_operation_hook( + openapi_spec, operation_spec, path=path + ) + + def _post_process_operation_hook( + self, openapi_spec, operation_spec, path: str | None = None + ): + """Hook to allow service specific generator to modify details""" + pass + + def process_query_parameters( + self, + openapi_spec, + operation_spec, + path_resource_names, + obj, + min_ver, + max_ver, + ): + """Process query parameters in different versions + + It is expected, that this method is invoked in the raising min_ver order to do proper cleanup of max_ver + """ + # Yey - we have query_parameters + if obj["type"] == "object": + params = obj["properties"] + for prop, spec in params.items(): + param_name = "_".join(path_resource_names) + f"_{prop}" + + param_attrs: dict[str, TypeSchema | dict] = {} + if spec["type"] == "array": + param_attrs["schema"] = TypeSchema( + **copy.deepcopy(spec["items"]) + ) + else: + raise RuntimeError("Error") + if min_ver: + os_ext = param_attrs.setdefault("x-openstack", {}) + os_ext["min-ver"] = min_ver + if max_ver: + os_ext = param_attrs.setdefault("x-openstack", {}) + os_ext["max-ver"] = max_ver + ref_name = self._get_param_ref( + openapi_spec, + param_name, + prop, + 
param_location="query", + path=None, + **param_attrs, + ) + if ref_name not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append( + ParameterSchema(ref=ref_name) + ) + + else: + raise RuntimeError( + "Query parameters %s is not an object as expected" % obj + ) + + def process_body_parameters( + self, + openapi_spec, + operation_spec, + path_resource_names, + body_schemas, + mode, + action_name, + ): + op_body = operation_spec.requestBody.setdefault("content", {}) + mime_type: str = "application/json" + schema_name = None + # We should not modify path_resource_names of the caller + path_resource_names = path_resource_names.copy() + # Create container schema with version discriminator + if action_name: + path_resource_names.append(action_name) + + cont_schema_name = ( + "".join([x.title() for x in path_resource_names]) + "Request" + ) + cont_schema = None + + if len(body_schemas) == 1: + # There is only one body known at the moment + if cont_schema_name in openapi_spec.components.schemas: + # if we have already oneOf - add there + cont_schema = openapi_spec.components.schemas[cont_schema_name] + if cont_schema.oneOf and body_schemas[0] not in [ + x["$ref"] for x in cont_schema.oneOf + ]: + cont_schema.oneOf.append({"$ref": body_schemas[0]}) + schema_ref = f"#/components/schemas/{cont_schema_name}" + else: + # otherwise just use schema as body + schema_ref = body_schemas[0] + elif len(body_schemas) > 1: + # We may end up here multiple times if we have versioned operation. 
In this case merge to what we have already + old_schema = op_body.get(mime_type, {}).get("schema", {}) + old_ref = ( + old_schema.ref + if isinstance(old_schema, TypeSchema) + else old_schema.get("$ref") + ) + cont_schema = openapi_spec.components.schemas.setdefault( + cont_schema_name, + TypeSchema( + oneOf=[], openstack={"discriminator": "microversion"} + ), + ) + # Add new refs to the container oneOf if they are not already + # there + cont_schema.oneOf.extend( + [ + {"$ref": n} + for n in body_schemas + if n not in [x.get("$ref") for x in cont_schema.oneOf] + ] + ) + schema_ref = f"#/components/schemas/{cont_schema_name}" + if ( + old_ref + and old_ref != schema_ref + and old_ref not in [x["$ref"] for x in cont_schema.oneOf] + ): + # In a previous iteration we only had one schema and decided + # not to create container. Now we need to change that by + # merging with previous data + cont_schema.oneOf.append({"$ref": old_ref}) + elif len(body_schemas) == 0 and mode == "action": + # There are actions without a real body description, but we know that action requires dummy body + cont_schema = openapi_spec.components.schemas.setdefault( + cont_schema_name, + TypeSchema( + description=f"Empty body for {action_name} action", + type="object", + properties={action_name: {"type": "null"}}, + openstack={"action-name": action_name}, + ), + ) + schema_ref = f"#/components/schemas/{cont_schema_name}" + elif len(body_schemas) == 0: + # We know nothing about request + schema_name = ( + "".join([x.title() for x in path_resource_names]) + + ( + action_name.replace("index", "list").title() + if not path_resource_names[-1].endswith(action_name) + else "" + ) + + "Request" + ) + (schema_ref, mime_type) = self._get_schema_ref( + openapi_spec, + schema_name, + description=f"Request of the {operation_spec.operationId} operation", + action_name=action_name, + ) + + if mode == "action": + js_content = op_body.setdefault(mime_type, {}) + body_schema = js_content.setdefault("schema", {}) 
+ one_of = body_schema.setdefault("oneOf", []) + if schema_ref not in [x.get("$ref") for x in one_of]: + one_of.append({"$ref": schema_ref}) + os_ext = body_schema.setdefault("x-openstack", {}) + os_ext["discriminator"] = "action" + if cont_schema and action_name: + cont_schema.openstack["action-name"] = action_name + elif schema_ref: + js_content = op_body.setdefault(mime_type, {}) + body_schema = js_content.setdefault("schema", {}) + operation_spec.requestBody["content"][mime_type]["schema"] = ( + TypeSchema(ref=schema_ref) + ) + + def _sanitize_schema( + self, schema, *, start_version=None, end_version=None + ): + """Various schemas are broken in various ways""" + + if isinstance(schema, dict): + # Forcibly convert to TypeSchema + schema = TypeSchema(**schema) + properties = getattr(schema, "properties", None) + if properties: + # Nova aggregates schemas are broken since they have "type": "object" inside "properties + if properties.get("type") == "object": + schema.properties.pop("type") + + if "anyOf" in properties: + # anyOf must be on the properties level and not under (nova host update) + anyOf = schema.properties.pop("anyOf") + schema.anyOf = anyOf + + for k, v in properties.items(): + typ = v.get("type") + if typ == "object": + schema.properties[k] = self._sanitize_schema(v) + if typ == "array" and "additionalItems" in v: + # additionalItems have nothing to do under the type array (create servergroup) + schema.properties[k].pop("additionalItems") + if typ == "array" and isinstance(v["items"], list): + # server_group create - type array "items" is a dict and not list + schema.properties[k]["items"] = v["items"][0] + if start_version and self._api_ver_major(start_version) not in [ + "0", + 0, + ]: + if not schema.openstack: + schema.openstack = {} + schema.openstack["min-ver"] = start_version.get_string() + if end_version and self._api_ver_major(end_version) not in ["0", 0]: + if not schema.openstack: + schema.openstack = {} + schema.openstack["max-ver"] = 
end_version.get_string() + return schema + + def _get_param_ref( + self, + openapi_spec, + ref_name: str, + param_name: str, + param_location: str, + path: str | None = None, + **param_attrs, + ): + if ref_name == "_project_id": + ref_name = "project_id" + ref_name = ref_name.replace(":", "_") + # Pop extensions for easier post processing + if param_attrs: + os_ext = param_attrs.pop("x-openstack", None) + else: + os_ext = None + # Ensure global parameter is present + param = ParameterSchema( + location=param_location, name=param_name, **param_attrs + ) + if param_location == "path": + param.required = True + if not param.description and path: + param.description = f"{param_name} parameter for {path} API" + # We can only assume the param type. For path it is logically a string only + if not param.type_schema: + param.type_schema = TypeSchema(type="string") + if os_ext and ("min-ver" in os_ext or "max-ver" in os_ext): + # min_ver is present + old_param = openapi_spec.components.parameters.get(ref_name, None) + if not old_param: + # Param was not present, just set what we have + param.openstack = os_ext + else: + # Param is already present. Check whether we need to modify min_ver + min_ver = os_ext.get("min-ver") + max_ver = os_ext.get("max-ver") + param.openstack = dict() + if not old_param.openstack: + old_param.openstack = {} + old_min_ver = old_param.openstack.get("min-ver") + old_max_ver = old_param.openstack.get("max-ver") + if old_min_ver and tuple(old_min_ver.split(".")) < tuple( + min_ver.split(".") + ): + # Existing param has lower min_ver. Keep the old value + os_ext["min-ver"] = old_min_ver + if ( + old_max_ver + and max_ver + and tuple(old_max_ver.split(".")) + > tuple(max_ver.split(".")) + ): + # Existing param has max_ver higher then what we have now. 
Keep old value + os_ext["max_ver"] = old_max_ver + if os_ext: + param.openstack = os_ext + + # Overwrite param + openapi_spec.components.parameters[ref_name] = param + return f"#/components/parameters/{ref_name}" + + def _get_schema_ref( + self, + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, + ) -> tuple[str, str]: + if not schema_def: + logging.warn( + "No Schema definition for %s[%s] is known", name, action_name + ) + schema_def = { + "type": "object", + "description": LiteralScalarString(description), + } + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **schema_def, + ), + ) + + if action_name: + if not schema.openstack: + schema.openstack = {} + schema.openstack.setdefault("action-name", action_name) + + return (f"#/components/schemas/{name}", "application/json") + + def _get_tags_for_url(self, url): + """Return Tag (group) name based on the URL""" + # Drop version prefix + url = re.sub(r"^(/v[0-9.]*/)", "/", url) + + for k, v in self.URL_TAG_MAP.items(): + if url.startswith(k): + return [v] + if url == "/": + return ["version"] + path_elements: list[str] = list(filter(None, url.split("/"))) + for el in path_elements: + # Use 1st (non project_id) path element as tag + if not el.startswith("{"): + return [el] + + @classmethod + def _get_response_codes(cls, method: str, operationId: str) -> list[str]: + if method == "DELETE": + response_code = "204" + elif method == "POST": + response_code = "201" + else: + response_code = "200" + return [response_code] + + +def _convert_wsme_to_jsonschema(body_spec): + """Convert WSME type description to JsonSchema""" + res: dict[str, Any] = {} + if wtypes.iscomplex(body_spec) or isinstance(body_spec, wtypes.wsattr): + res = {"type": "object", "properties": {}} + doc = inspect.getdoc(body_spec) + if doc: + res.setdefault("description", LiteralScalarString(doc)) + required = set() + for attr in wtypes.list_attributes(body_spec): + attr_value = getattr(body_spec, 
attr.key) + if isinstance(attr_value, wtypes.wsproperty): + r = _convert_wsme_to_jsonschema(attr_value) + else: + r = _convert_wsme_to_jsonschema(attr_value._get_datatype()) + res["properties"][attr.key] = r + if attr.mandatory: + required.add(attr.name) + # todo: required + if required: + res.setdefault("required", list(required)) + elif isinstance(body_spec, wtypes.ArrayType): + res = { + "type": "array", + "items": _convert_wsme_to_jsonschema(body_spec.item_type), + } + elif isinstance(body_spec, wtypes.StringType) or body_spec is str: + res = {"type": "string"} + min_len = getattr(body_spec, "min_length", None) + max_len = getattr(body_spec, "max_length", None) + if min_len: + res["minLength"] = min_len + if max_len: + res["maxLength"] = max_len + elif isinstance(body_spec, wtypes.IntegerType): + res = {"type": "integer"} + minimum = getattr(body_spec, "minimum", None) + maximum = getattr(body_spec, "maximum", None) + if minimum: + res["minimum"] = minimum + if maximum: + res["maximum"] = maximum + elif isinstance(body_spec, wtypes.Enum): + basetype = body_spec.basetype + values = body_spec.values + if basetype is str: + res = {"type": "string"} + elif basetype is float: + res = {"type": "number"} + elif basetype is int: + res = {"type": "integer"} + else: + raise RuntimeError("Unsupported basetype %s" % basetype) + res["enum"] = list(values) + # elif hasattr(body_spec, "__name__") and body_spec.__name__ == "bool": + elif wtypes.isdict(body_spec): + res = { + "type": "object", + "additionalProperties": { + "type": _convert_wsme_to_jsonschema(body_spec.value_type) + }, + } + elif wtypes.isusertype(body_spec): + basetype = body_spec.basetype + name = body_spec.name + if basetype is str: + res = {"type": "string", "format": name} + else: + raise RuntimeError("Unsupported basetype %s" % basetype) + elif isinstance(body_spec, wtypes.wsproperty): + res = _convert_wsme_to_jsonschema(body_spec.datatype) + elif body_spec is bool: + # wsattr(bool) lands here as <class 
'bool'> + res = {"type": "boolean"} + elif body_spec is float: + res = {"type": "number", "format": "float"} + elif ( + isinstance(body_spec, wtypes.dt_types) + or body_spec is datetime.datetime + ): + res = {"type": "string", "format": "date-time"} + else: + raise RuntimeError("Unsupported object %s" % body_spec) + + return res diff --git a/codegenerator/openapi/cinder.py b/codegenerator/openapi/cinder.py new file mode 100644 index 0000000..1a12d1e --- /dev/null +++ b/codegenerator/openapi/cinder.py @@ -0,0 +1,346 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +from multiprocessing import Process +from pathlib import Path + +from ruamel.yaml.scalarstring import LiteralScalarString + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import SpecSchema +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.base import OpenStackServerSourceBase +from codegenerator.openapi import cinder_schemas +from codegenerator.openapi.utils import merge_api_ref_doc + + +class CinderV3Generator(OpenStackServerSourceBase): + URL_TAG_MAP = { + "/versions": "version", + } + + def _api_ver_major(self, ver): + return ver._ver_major + + def _api_ver_minor(self, ver): + return ver._ver_minor + + def _api_ver(self, ver): + return (ver._ver_major, ver._ver_minor) + + def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Cinder OpenAPI schma") + return Path(target_dir, "openapi_specs", "block-storage", "v3.yaml") + + def _generate(self, target_dir, args, *pargs, **kwargs): + from cinder import objects, rpc + from cinder.api.openstack import api_version_request + from cinder.common import config + from cinder.tests.unit.test import Database as db_fixture + + # Register all Cinder objects + objects.register_all() + + CONF = config.CONF + + self.api_version = api_version_request._MAX_API_VERSION + self.min_api_version = api_version_request._MIN_API_VERSION + + rpc.init(CONF) + + CONF.set_default("connection", "sqlite:///", "database") + CONF.set_default("sqlite_synchronous", False, "database") + + self.useFixture(db_fixture()) + + from cinder.api.v3 import router + + self.router = router.APIRouter() + + work_dir = Path(target_dir) + work_dir.mkdir(parents=True, exist_ok=True) + + impl_path = Path(work_dir, "openapi_specs", "block-storage", "v3.yaml") + impl_path.parent.mkdir(parents=True, exist_ok=True) + + openapi_spec = 
self.load_openapi(impl_path) + if not openapi_spec: + openapi_spec = SpecSchema( + info=dict( + title="OpenStack Volume API", + description=LiteralScalarString( + "Volume API provided by Cinder service" + ), + version=self.api_version, + ), + openapi="3.1.0", + security=[{"ApiKeyAuth": []}], + components=dict( + securitySchemes={ + "ApiKeyAuth": { + "type": "apiKey", + "in": "header", + "name": "X-Auth-Token", + } + }, + ), + ) + + # Set global parameters + for name, definition in cinder_schemas.VOLUME_PARAMETERS.items(): + openapi_spec.components.parameters[name] = ParameterSchema( + **definition + ) + + for route in self.router.map.matchlist: + # if route.routepath.startswith("/{project"): + # continue + if route.routepath.endswith(".:(format)"): + continue + + self._process_route(route, openapi_spec, ver_prefix="/v3") + + self._sanitize_param_ver_info(openapi_spec, self.min_api_version) + + if args.api_ref_src: + merge_api_ref_doc(openapi_spec, args.api_ref_src) + + self.dump_openapi(openapi_spec, impl_path, args.validate) + + return impl_path + + def _post_process_operation_hook( + self, openapi_spec, operation_spec, path: str | None = None + ): + """Hook to allow service specific generator to modify details""" + operationId = operation_spec.operationId + + if operationId in [ + "project_id/volumes:get", + "volumes:get", + "project_id/volumes/detail:get", + "volumes/detail:get", + ]: + for pname in [ + "all_tenants", + "sort", + "sort_key", + "sort_dir", + "limit", + "offset", + "marker", + "with_count", + "created_at", + "updated_at", + "consumes_quota", + ]: + ref = f"#/components/parameters/{pname}" + if ref not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append(ParameterSchema(ref=ref)) + elif operationId in [ + "project_id/volumes/summary:get", + ]: + for pname in [ + "all_tenants", + ]: + ref = f"#/components/parameters/{pname}" + if ref not in [x.ref for x in operation_spec.parameters]: + 
operation_spec.parameters.append(ParameterSchema(ref=ref)) + + elif operationId in [ + "project_id/types:get", + ]: + for key, val in cinder_schemas.VOLUME_TYPE_LIST_PARAMETERS.items(): + openapi_spec.components.parameters.setdefault( + key, ParameterSchema(**val) + ) + ref = f"#/components/parameters/{key}" + if ref not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append(ParameterSchema(ref=ref)) + + def _get_schema_ref( + self, + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, + ): + mime_type: str = "application/json" + # ### Volume + if name == "VolumesListResponse": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**cinder_schemas.VOLUMES_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + if name == "VolumesDetailResponse": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**cinder_schemas.VOLUMES_DETAIL_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "VolumeShowResponse", + "VolumeUpdateResponse", + "VolumesCreateResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**cinder_schemas.VOLUME_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # ### Volume Metadata + elif name in [ + "VolumesMetadataListResponse", + "VolumesMetadataUpdate_All", + "VolumesMetadataUpdate_AllResponse", + "VolumesMetadataCreateResponse", + "VolumesActionOs-Set_Image_MetadataResponse", + "VolumesActionOs-Show_Image_MetadataResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**cinder_schemas.METADATA_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "VolumesMetadataShowResponse", + "VolumesMetadataUpdate", + "VolumesMetadataUpdateResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**cinder_schemas.METADATA_ITEM_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # Volume Actions + elif name == "VolumesActionRevertResponse": + return (None, 
None) + elif name == "VolumesActionOs-Reset_StatusRequest": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**cinder_schemas.VOLUME_RESET_STATUS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "VolumesActionOs-Reset_StatusResponse", + "VolumesActionOs-Force_DeleteResponse", + "VolumesActionOs-Force_DetachResponse", + "VolumesActionOs-Migrate_VolumeResponse", + "VolumesActionOs-Migrate_Volume_CompletionResponse", + "VolumesActionOs-AttachResponse", + "VolumesActionOs-DetachResponse", + "VolumesActionOs-ReserveResponse", + "VolumesActionOs-UnreserveResponse", + "VolumesActionOs-Begin_DetachingResponse", + "VolumesActionOs-Roll_DetachingResponse", + "VolumesActionOs-Initialize_ConnectionResponse", + "VolumesActionOs-Terminate_ConnectionResponse", + "VolumesActionOs-ExtendResponse", + "VolumesActionOs-Update_Readonly_FlagResponse", + "VolumesActionOs-RetypeResponse", + "VolumesActionOs-Set_BootableResponse", + "VolumesActionOs-ReimageResponse", + "VolumesActionOs-Unset_Image_MetadataResponse", + "VolumesActionOs-UnmanageResponse", + ]: + return (None, None) + elif name == "VolumesActionOs-Volume_Upload_ImageResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **cinder_schemas.VOLUME_UPLOAD_IMAGE_RESPONSE_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + # ### Volume Type + elif name == "TypesListResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**cinder_schemas.VOLUME_TYPES_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "TypesCreateResponse", + "TypeShowResponse", + "TypeUpdateResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**cinder_schemas.VOLUME_TYPE_CONTAINER_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "TypesExtra_SpecsListResponse", + "TypesExtra_SpecsCreateResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + 
TypeSchema(**cinder_schemas.VOLUME_TYPE_EXTRA_SPECS_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + + elif name in [ + "TypesExtra_SpecShowResponse", + "TypesExtra_SpecUpdateResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**cinder_schemas.VOLUME_TYPE_EXTRA_SPEC_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + + elif name == "TypesOs_Volume_Type_AccessListResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**cinder_schemas.VOLUME_TYPE_ACCESS_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "TypesActionAddprojectaccessResponse", + "TypesActionRemoveprojectaccessResponse", + ]: + return (None, None) + + # ### Volume Type Encryption + # this is not really a list operation, but who cares + elif name == "TypesEncryptionListResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**cinder_schemas.VOLUME_TYPE_ENCRYPTION_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "TypesEncryptionShowResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **cinder_schemas.VOLUME_TYPE_ENCRYPTION_SHOW_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "TypesEncryptionCreateResponse", + "TypesEncryptionUpdateResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **cinder_schemas.VOLUME_TYPE_ENCRYPTION_CONTAINER_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + + # Default + else: + (ref, mime_type) = super()._get_schema_ref( + openapi_spec, name, description, action_name=action_name + ) + return (ref, mime_type) diff --git a/codegenerator/openapi/cinder_schemas.py b/codegenerator/openapi/cinder_schemas.py new file mode 100644 index 0000000..ff5599b --- /dev/null +++ b/codegenerator/openapi/cinder_schemas.py @@ -0,0 +1,609 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import copy +from typing import Any + +from cinder.api.schemas import admin_actions +from cinder.api.validation import parameter_types + +# NOTE(gtema): This is a temporary location for schemas not currently defined +# in Cinder. Once everything is stabilized those must be moved directly to Cinder + +LINK_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Links to the resources in question. See [API Guide / Links and References](https://docs.openstack.org/api-guide/compute/links_and_references.html) for more info.", + "properties": { + "href": {"type": "string", "format": "uri"}, + "rel": {"type": "string"}, + }, +} + +LINKS_SCHEMA: dict[str, Any] = { + "type": "array", + "description": "Links to the resources in question. See [API Guide / Links and References](https://docs.openstack.org/api-guide/compute/links_and_references.html) for more info.", + "items": copy.deepcopy(LINK_SCHEMA), +} + +ATTACHMENT_SCHEMA = { + "type": "object", + "properties": { + "server_id": {"type": "string", "format": "uuid"}, + "attachment_id": {"type": "string", "format": "uuid"}, + "attached_at": {"type": "string", "format": "date-time"}, + "host_name": {"type": "string"}, + "volume_id": {"type": "string", "format": "uuid"}, + "device": {"type": "string"}, + "id": {"type": "string", "format": "uuid"}, + }, +} + +ATTACHMENTS_SCHEMA = { + "type": "array", + "items": copy.deepcopy(ATTACHMENT_SCHEMA), +} + +METADATA_SCHEMA = { + "type": "object", + "patternProperties": { + "^[a-zA-Z0-9-_:. 
/]{1,255}$": {"type": "string", "maxLength": 255}, + }, + "additionalProperties": False, + "description": "A metadata object. Contains one or more metadata key and value pairs that are associated with the resource.", +} + +METADATA_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Metadata key and value pairs. The maximum size for each metadata key and value pair is 255 bytes.", + "properties": {"metadata": METADATA_SCHEMA}, +} + +METADATA_ITEM_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Metadata key and value pairs. The maximum size for each metadata key and value pair is 255 bytes.", + "properties": {"meta": {"maxProperties": 1, **METADATA_SCHEMA}}, +} + +VOLUME_SHORT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A volume object.", + "properties": { + "name": { + "type": ["string", "null"], + "description": "The volume name.", + }, + "links": { + "description": "The volume links.", + **copy.deepcopy(LINKS_SCHEMA), + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the volume.", + }, + }, +} + +VOLUME_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A volume object.", + "properties": { + "name": { + "type": ["string", "null"], + "description": "The volume name.", + }, + "description": { + "type": ["string", "null"], + "description": "The volume description.", + }, + "volume_type": { + "type": "string", + "description": "The associated volume type name for the volume.", + }, + "metadata": copy.deepcopy(METADATA_SCHEMA), + "snapshot_id": { + "type": "string", + "format": "uuid", + "description": "To create a volume from an existing snapshot, specify the UUID of the volume snapshot. The volume is created in same availability zone and with same size as the snapshot.", + }, + "source_volid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the source volume. 
The API creates a new volume with the same size as the source volume unless a larger size is requested.", + }, + "consistencygroup_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the consistency group.", + }, + "size": { + "type": "integer", + "format": "int64", + "description": "The size of the volume, in gibibytes (GiB).", + }, + "availability_zone": { + "type": "string", + "description": "The name of the availability zone.", + }, + "multiattach": { + "type": "boolean", + "description": "If true, this volume can attach to more than one instance.", + }, + "status": { + "type": "string", + "description": "The volume status.", + }, + "migration_status": { + "type": "string", + "description": "The volume migration status. Admin only.", + }, + "attachments": { + "description": "Instance attachment information. If this volume is attached to a server instance, the attachments list includes the UUID of the attached server, an attachment UUID, the name of the attached host, if any, the volume UUID, the device, and the device UUID. 
Otherwise, this list is empty.", + **copy.deepcopy(ATTACHMENTS_SCHEMA), + }, + "links": { + "description": "The volume links.", + **copy.deepcopy(LINKS_SCHEMA), + }, + "encrypted": { + "type": "boolean", + "description": "If true, this volume is encrypted.", + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was created.", + }, + "updated_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + "replication_status": { + "type": "string", + "description": "The volume replication status.", + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the volume.", + }, + "user_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the user.", + }, + "volume_type_id": { + "type": "string", + "format": "uuid", + "description": "The associated volume type ID for the volume.", + "x-openstack": {"min-ver": "3.63"}, + }, + "group_id": { + "type": "string", + "format": "uuid", + "description": "The ID of the group.", + "x-openstack": {"min-ver": "3.63"}, + }, + "provider_id": { + "type": ["string", "null"], + "format": "uuid", + "description": "The provider ID for the volume. The value is either a string set by the driver or null if the driver doesn’t use the field or if it hasn’t created it yet. Only returned for administrators.", + "x-openstack": {"min-ver": "3.21"}, + }, + "service_uuid": { + "type": "string", + "format": "uuid", + "description": "A unique identifier that’s used to indicate what node the volume-service for a particular volume is being serviced by.", + "x-openstack": {"min-ver": "3.48"}, + }, + "shared_targets": { + "type": "boolean", + "description": "An indicator whether the host connecting the volume should lock for the whole attach/detach process or not. 
true means only is iSCSI initiator running on host doesn’t support manual scans, false means never use locks, and null means to always use locks. Look at os-brick’s guard_connection context manager. Default=True.", + "x-openstack": {"min-ver": "3.48"}, + }, + "cluster_name": { + "type": "string", + "description": "The cluster name of volume backend.", + "x-openstack": {"min-ver": "3.61"}, + }, + "consumes_quota": { + "type": "boolean", + "description": "Whether this resource consumes quota or not. Resources that not counted for quota usage are usually temporary internal resources created to perform an operation.", + "x-openstack": {"min-ver": "3.65"}, + }, + }, + "additionalProperties": True, +} + +VOLUME_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A volume object.", + "properties": {"volume": copy.deepcopy(VOLUME_SCHEMA)}, + "required": ["volume"], + "additionalProperties": False, +} + +VOLUMES_SCHEMA = { + "type": "object", + "description": "A container with list of volume objects.", + "properties": { + "volumes": { + "type": "array", + "items": copy.deepcopy(VOLUME_SHORT_SCHEMA), + "description": "A list of volume objects.", + }, + }, +} + +VOLUMES_DETAIL_SCHEMA = { + "type": "object", + "description": "A container with list of volume objects.", + "properties": { + "volumes": { + "type": "array", + "items": copy.deepcopy(VOLUME_SCHEMA), + "description": "A list of volume objects.", + }, + }, +} + +VOLUME_PARAMETERS = { + "all_tenants": { + "in": "query", + "name": "all_tenants", + "schema": { + "type": "boolean", + }, + "description": "Shows details for all projects. Admin only.", + }, + "sort": { + "in": "query", + "name": "sort", + "schema": { + "type": "string", + }, + "description": "Comma-separated list of sort keys and optional sort directions in the form of < key > [: < direction > ]. 
A valid direction is asc (ascending) or desc (descending).", + }, + "sort_key": { + "in": "query", + "name": "sort_key", + "schema": { + "type": "string", + }, + "description": "Sorts by an attribute. A valid value is name, status, container_format, disk_format, size, id, created_at, or updated_at. Default is created_at. The API uses the natural sorting direction of the sort_key attribute value. Deprecated in favour of the combined sort parameter.", + }, + "sort_dir": { + "in": "query", + "name": "sort_dir", + "schema": { + "type": "string", + "enum": ["asc", "desc"], + }, + "description": "Sorts by one or more sets of attribute and sort direction combinations. If you omit the sort direction in a set, default is desc. Deprecated in favour of the combined sort parameter.", + }, + "limit": { + "in": "query", + "name": "limit", + "schema": { + "type": "integer", + }, + "description": "Requests a page size of items. Returns a number of items up to a limit value. Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request.", + }, + "offset": { + "in": "query", + "name": "offset", + "schema": { + "type": "integer", + }, + "description": "Used in conjunction with limit to return a slice of items. offset is where to start in the list.", + }, + "marker": { + "in": "query", + "name": "marker", + "schema": { + "type": "string", + "format": "uuid", + }, + "description": "The ID of the last-seen item. 
Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request.", + }, + "with_count": { + "in": "query", + "name": "with_count", + "schema": { + "type": "boolean", + }, + "description": "Whether to show count in API response or not, default is False.", + "x-openstack": {"min-ver": "3.45"}, + }, + "created_at": { + "in": "query", + "name": "created_at", + "schema": { + "type": "string", + "format": "date-time", + }, + "description": "Filters results by a time that resources are created at with time comparison operators: gt/gte/eq/neq/lt/lte.", + "x-openstack": {"min-ver": "3.60"}, + }, + "updated_at": { + "in": "query", + "name": "updated_at", + "schema": { + "type": "string", + "format": "date-time", + }, + "description": "Filters results by a time that resources are updated at with time comparison operators: gt/gte/eq/neq/lt/lte.", + "x-openstack": {"min-ver": "3.60"}, + }, + "consumes_quota": { + "in": "query", + "name": "consumes_quota", + "schema": { + "type": "boolean", + }, + "description": "Filters results by consumes_quota field. Resources that don’t use quotas are usually temporary internal resources created to perform an operation. Default is to not filter by it. Filtering by this option may not be always possible in a cloud, see List Resource Filters to determine whether this filter is available in your cloud.", + "x-openstack": {"min-ver": "3.65"}, + }, +} + +VOLUME_RESET_STATUS_SCHEMA: dict[str, Any] = admin_actions.reset + +VOLUME_UPLOAD_IMAGE_RESPONSE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "container_format": { + "type": "string", + "description": "Container format for the new image. Default is bare.", + }, + "disk_format": { + "type": "string", + "description": "Disk format for the new image. 
Default is raw.", + }, + "display_description": { + "type": "string", + "description": "The volume description.", + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the volume.", + }, + "image_id": { + "type": "string", + "format": "uuid", + "description": "The uuid for the new image.", + }, + "image_name": { + "type": "string", + "description": "The name for the new image.", + }, + "protected": { + "type": "boolean", + "description": "Whether the new image is protected. Default=False.", + "x-openstack": {"min-ver": "3.1"}, + }, + "size": { + "type": "integer", + "format": "int64", + "description": "The size of the volume, in gibibytes (GiB).", + }, + "status": {"type": "string", "description": "The volume status."}, + "updated_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + "visibility": { + "type": "string", + "description": "The visibility property of the new image. Default is private.", + "x-openstack": {"min-ver": "3.1"}, + }, + "volume_type": { + "type": "string", + "description": "The associated volume type name for the volume.", + }, + }, +} + +VOLUME_TYPE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "description": { + "type": "string", + "description": "The volume type description.", + }, + "extra_specs": { + "description": "A key and value pair that contains additional specifications that are associated with the volume type. 
Examples include capabilities, capacity, compression, and so on, depending on the storage driver in use.", + **parameter_types.extra_specs_with_no_spaces_key, + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the volume type.", + }, + "is_public": { + "type": "boolean", + "description": "Whether the volume type is publicly visible.", + }, + "name": { + "type": "string", + "description": "The volume type description.", + }, + "os-volume-type-access:is_public": { + "type": "boolean", + "description": "Whether the volume type is publicly visible.", + }, + "qos_specs_id": { + "type": "string", + "format": "uuid", + "description": "The QoS specifications ID.", + }, + }, +} + +VOLUME_TYPE_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"volume_type": VOLUME_TYPE_SCHEMA}, +} + +VOLUME_TYPES_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "volume_types": {"type": "array", "items": VOLUME_TYPE_SCHEMA} + }, +} + +VOLUME_TYPE_LIST_PARAMETERS: dict[str, Any] = { + "type_is_public": { + "in": "query", + "name": "is_public", + "description": "Filter the volume type by public visibility.", + "schema": {"type": "boolean"}, + }, + "type_sort": { + "in": "query", + "name": "sort", + "description": "Comma-separated list of sort keys and optional sort directions in the form of < key > [: < direction > ]. A valid direction is asc (ascending) or desc (descending).", + "schema": {"type": "string"}, + }, + "type_sort_key": { + "in": "query", + "name": "sort_key", + "description": "Sorts by an attribute. A valid value is name, status, container_format, disk_format, size, id, created_at, or updated_at. Default is created_at. The API uses the natural sorting direction of the sort_key attribute value. 
Deprecated in favour of the combined sort parameter.", + "schema": {"type": "string"}, + }, + "type_sort_dir": { + "in": "query", + "name": "sort_dir", + "description": "Sorts by one or more sets of attribute and sort direction combinations. If you omit the sort direction in a set, default is desc. Deprecated in favour of the combined sort parameter.", + "schema": {"type": "string"}, + }, + "type_limit": { + "in": "query", + "name": "limit", + "description": "Requests a page size of items. Returns a number of items up to a limit value. Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request.", + "schema": {"type": "integer"}, + }, + "type_marker": { + "in": "query", + "name": "marker", + "description": "The ID of the last-seen item. Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request.", + "schema": {"type": "string"}, + }, + "type_offset": { + "in": "query", + "name": "offset", + "description": "Used in conjunction with limit to return a slice of items. offset is where to start in the list.", + "schema": {"type": "integer"}, + }, +} + +VOLUME_TYPE_EXTRA_SPECS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "extra_specs": { + "description": "A key and value pair that contains additional specifications that are associated with the volume type. 
Examples include capabilities, capacity, compression, and so on, depending on the storage driver in use.", + **parameter_types.extra_specs_with_no_spaces_key, + }, + }, +} + +VOLUME_TYPE_EXTRA_SPEC_SCHEMA: dict[str, Any] = ( + parameter_types.extra_specs_with_no_spaces_key +) + +VOLUME_TYPE_ACCESS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "volume_type_access": { + "type": "array", + "items": { + "type": "object", + "properties": { + "project_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the project.", + }, + "volume_type_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the volume type.", + }, + }, + }, + } + }, +} + +VOLUME_TYPE_ENCRYPTION_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "cipher": { + "type": "string", + "description": "The encryption algorithm or mode. For example, aes-xts-plain64. The default value is None.", + }, + "control_location": { + "type": "string", + "enum": ["front-end", "back-end"], + "description": "Notional service where encryption is performed. Valid values are “front-end” or “back-end”. The default value is “front-end”.", + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was created.", + }, + "deleted": { + "type": "boolean", + "description": "The resource is deleted or not.", + }, + "deleted_at": { + "type": ["string", "null"], + "format": "date-time", + "description": "The date and time when the resource was deleted.", + }, + "encryption_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the encryption.", + }, + "key_size": { + "type": "integer", + "description": "Size of encryption key, in bits. This is usually 256. 
The default value is None.", + }, + "provider": { + "type": "string", + "description": "The class that provides encryption support.", + }, + "updated_at": { + "type": ["string", "null"], + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + }, +} + +VOLUME_TYPE_ENCRYPTION_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"encryption": VOLUME_TYPE_ENCRYPTION_SCHEMA}, +} + +VOLUME_TYPE_ENCRYPTION_SHOW_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "cipher": { + "type": "string", + "description": "The encryption algorithm or mode. For example, aes-xts-plain64. The default value is None.", + }, + }, +} diff --git a/codegenerator/openapi/glance.py b/codegenerator/openapi/glance.py new file mode 100644 index 0000000..d308676 --- /dev/null +++ b/codegenerator/openapi/glance.py @@ -0,0 +1,703 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import copy +from multiprocessing import Process +from pathlib import Path + +from jsonref import replace_refs +import routes +from ruamel.yaml.scalarstring import LiteralScalarString + +from codegenerator.common.schema import ( + SpecSchema, + TypeSchema, + ParameterSchema, + HeaderSchema, +) +from codegenerator.openapi.base import OpenStackServerSourceBase +from codegenerator.openapi.utils import merge_api_ref_doc + +IMAGE_PARAMETERS = { + "limit": { + "in": "query", + "name": "limit", + "description": LiteralScalarString( + "Requests a page size of items. Returns a number of items up to a limit value. Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request." + ), + "schema": {"type": "integer"}, + }, + "marker": { + "in": "query", + "name": "marker", + "description": LiteralScalarString( + "The ID of the last-seen item. Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request." + ), + "schema": {"type": "string"}, + }, + "id": { + "in": "query", + "name": "id", + "description": "id filter parameter", + "schema": {"type": "string"}, + }, + "name": { + "in": "query", + "name": "name", + "description": LiteralScalarString( + "Filters the response by a name, as a string. A valid value is the name of an image." + ), + "schema": {"type": "string"}, + }, + "visibility": { + "in": "query", + "name": "visibility", + "description": LiteralScalarString( + "Filters the response by an image visibility value. A valid value is public, private, community, shared, or all. (Note that if you filter on shared, the images included in the response will only be those where your member status is accepted unless you explicitly include a member_status filter in the request.) 
If you omit this parameter, the response shows public, private, and those shared images with a member status of accepted." + ), + "schema": { + "type": "string", + "enum": ["public", "private", "community", "shared", "all"], + }, + }, + "member_status": { + "in": "query", + "name": "member_status", + "description": LiteralScalarString( + "Filters the response by a member status. A valid value is accepted, pending, rejected, or all. Default is accepted." + ), + "schema": { + "type": "string", + "enum": ["accepted", "pending", "rejected", "all"], + }, + }, + "owner": { + "in": "query", + "name": "owner", + "description": LiteralScalarString( + "Filters the response by a project (also called a “tenant”) ID. Shows only images that are shared with you by the specified owner." + ), + "schema": {"type": "string"}, + }, + "status": { + "in": "query", + "name": "status", + "description": LiteralScalarString( + "Filters the response by an image status." + ), + "schema": {"type": "string"}, + }, + "size_min": { + "in": "query", + "name": "size_min", + "description": LiteralScalarString( + "Filters the response by a minimum image size, in bytes." + ), + "schema": {"type": "string"}, + }, + "size_max": { + "in": "query", + "name": "size_max", + "description": LiteralScalarString( + "Filters the response by a maximum image size, in bytes." + ), + "schema": {"type": "string"}, + }, + "protected": { + "in": "query", + "name": "protected", + "description": LiteralScalarString( + "Filters the response by the ‘protected’ image property. A valid value is one of ‘true’, ‘false’ (must be all lowercase). Any other value will result in a 400 response." + ), + "schema": {"type": "boolean"}, + }, + "os_hidden": { + "in": "query", + "name": "os_hidden", + "description": LiteralScalarString( + 'When true, filters the response to display only "hidden" images. By default, "hidden" images are not included in the image-list response. 
(Since Image API v2.7)' + ), + "schema": { + "type": "boolean", + }, + "x-openstack": {"min-ver": "2.7"}, + }, + "sort_key": { + "in": "query", + "name": "sort_key", + "description": LiteralScalarString( + "Sorts the response by an attribute, such as name, id, or updated_at. Default is created_at. The API uses the natural sorting direction of the sort_key image attribute." + ), + "schema": {"type": "string"}, + }, + "sort_dir": { + "in": "query", + "name": "sort_dir", + "description": LiteralScalarString( + "Sorts the response by a set of one or more sort direction and attribute (sort_key) combinations. A valid value for the sort direction is asc (ascending) or desc (descending). If you omit the sort direction in a set, the default is desc." + ), + "schema": {"type": "string", "enum": ["asc", "desc"]}, + }, + "sort": { + "in": "query", + "name": "sort", + "description": LiteralScalarString( + "Sorts the response by one or more attribute and sort direction combinations. You can also set multiple sort keys and directions. Default direction is desc. Use the comma (,) character to separate multiple values. For example: `sort=name:asc,status:desc`" + ), + "schema": {"type": "string"}, + }, + "tag": { + "in": "query", + "name": "tag", + "description": LiteralScalarString( + "Filters the response by the specified tag value. May be repeated, but keep in mind that you're making a conjunctive query, so only images containing all the tags specified will appear in the response." + ), + "schema": {"type": "array", "items": {"type": "string"}}, + "style": "form", + "explode": True, + }, + "created_at": { + "in": "query", + "name": "created_at", + "description": LiteralScalarString( + "Specify a comparison filter based on the date and time when the resource was created." 
+ ), + "schema": {"type": "string", "format": "date-time"}, + }, + "updated_at": { + "in": "query", + "name": "updated_at", + "description": LiteralScalarString( + "Specify a comparison filter based on the date and time when the resource was most recently modified." + ), + "schema": {"type": "string", "format": "date-time"}, + }, + "range": { + "in": "header", + "name": "Range", + "description": LiteralScalarString( + "The range of image data requested. Note that multi range requests are not supported." + ), + "schema": {"type": "string"}, + }, + "content-type": { + "in": "header", + "name": "Content-Type", + "description": LiteralScalarString( + "The media type descriptor of the body, namely application/octet-stream" + ), + "schema": {"type": "string"}, + }, + "x-image-meta-store": { + "in": "header", + "name": "X-Image-Meta-Store", + "description": LiteralScalarString( + "A store identifier to upload or import image data. Should only be included when making a request to a cloud that supports multiple backing stores. Use the Store Discovery call to determine an appropriate store identifier. Simply omit this header to use the default store." 
+        ),
+        "schema": {"type": "string"},
+    },
+}
+
+IMAGE_HEADERS = {
+    "Content-Type": {
+        "description": LiteralScalarString(
+            "The media type descriptor of the body, namely application/octet-stream"
+        ),
+        "schema": {"type": "string"},
+    },
+    "Content-Length": {
+        "description": LiteralScalarString(
+            "The length of the body in octets (8-bit bytes)"
+        ),
+        "schema": {"type": "string"},
+    },
+    "Content-Md5": {
+        "description": "The MD5 checksum of the body",
+        "schema": {"type": "string"},
+    },
+    "Content-Range": {
+        "description": "The content range of image data",
+        "schema": {"type": "string"},
+    },
+    "OpenStack-image-store-ids": {
+        "description": "list of available stores",
+        "schema": {"type": "array", "items": {"type": "string"}},
+    },
+}
+
+
+class GlanceGenerator(OpenStackServerSourceBase):
+    URL_TAG_MAP = {
+        "/versions": "version",
+    }
+
+    def __init__(self):
+        self.api_version = "2.16"
+        self.min_api_version = None
+
+    def _api_ver_major(self, ver):
+        return ver.ver_major
+
+    def _api_ver_minor(self, ver):
+        return ver.ver_minor
+
+    def _api_ver(self, ver):
+        return (ver.ver_major, ver.ver_minor)
+
+    def generate(self, target_dir, args):
+        proc = Process(target=self._generate, args=[target_dir, args])
+        proc.start()
+        proc.join()
+        if proc.exitcode != 0:
+            raise RuntimeError("Error generating Glance OpenAPI schema")
+        return Path(target_dir, "openapi_specs", "image", "v2.yaml")
+
+    def _generate(self, target_dir, args):
+        from glance.api.v2 import router
+        from glance.common import config
+        from oslo_config import fixture as cfg_fixture
+
+        self._config_fixture = self.useFixture(cfg_fixture.Config())
+
+        config.parse_args(args=[])
+
+        self.router = router.API(routes.Mapper())
+
+        work_dir = Path(target_dir)
+        work_dir.mkdir(parents=True, exist_ok=True)
+
+        impl_path = Path(work_dir, "openapi_specs", "image", "v2.yaml")
+        impl_path.parent.mkdir(parents=True, exist_ok=True)
+
+        openapi_spec = self.load_openapi(impl_path)
+        if not openapi_spec:
+
openapi_spec = SpecSchema( + info=dict( + title="OpenStack Image API", + description=LiteralScalarString( + "Image API provided by Glance service" + ), + version=self.api_version, + ), + openapi="3.1.0", + security=[{"ApiKeyAuth": []}], + components=dict( + securitySchemes={ + "ApiKeyAuth": { + "type": "apiKey", + "in": "header", + "name": "X-Auth-Token", + } + }, + ), + ) + + # Set global headers and parameters + for name, definition in IMAGE_PARAMETERS.items(): + openapi_spec.components.parameters[name] = ParameterSchema( + **definition + ) + for name, definition in IMAGE_HEADERS.items(): + openapi_spec.components.headers[name] = HeaderSchema(**definition) + + for route in self.router.map.matchlist: + if not route.conditions: + continue + self._process_route(route, openapi_spec, ver_prefix="/v2") + + self._sanitize_param_ver_info(openapi_spec, self.min_api_version) + + if args.api_ref_src: + merge_api_ref_doc(openapi_spec, args.api_ref_src) + + self.dump_openapi(openapi_spec, impl_path, args.validate) + + return impl_path + + def _post_process_operation_hook( + self, openapi_spec, operation_spec, path: str | None = None + ): + """Hook to allow service specific generator to modify details""" + operationId = operation_spec.operationId + + if operationId == "images:get": + for pname in [ + "limit", + "marker", + "name", + "id", + "owner", + "protected", + "status", + "tag", + "visibility", + "os_hidden", + "member_status", + "size_max", + "size_min", + "created_at", + "updated_at", + "sort_dir", + "sort_key", + "sort", + ]: + ref = f"#/components/parameters/{pname}" + if ref not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append(ParameterSchema(ref=ref)) + elif operationId == "images:post": + key = "OpenStack-image-store-ids" + ref = f"#/components/headers/{key}" + operation_spec.responses["201"].setdefault("headers", {}) + operation_spec.responses["201"]["headers"].update( + {key: {"$ref": ref}} + ) + + elif operationId == 
"images/image_id/file:put": + for ref in [ + "#/components/parameters/content-type", + "#/components/parameters/x-image-meta-store", + ]: + if ref not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append(ParameterSchema(ref=ref)) + elif operationId == "images/image_id/file:get": + for ref in [ + "#/components/parameters/range", + ]: + if ref not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append(ParameterSchema(ref=ref)) + for code in ["200", "206"]: + operation_spec.responses[code].setdefault("headers", {}) + for hdr in ["Content-Type", "Content-Md5", "Content-Length"]: + operation_spec.responses[code]["headers"].setdefault( + hdr, + {"$ref": f"#/components/headers/{hdr}"}, + ) + operation_spec.responses["206"]["headers"].setdefault( + "Content-Range", + {"$ref": "#/components/headers/Content-Range"}, + ) + + def _get_schema_ref( + self, + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, + ): + from glance.api.v2 import image_members + from glance.api.v2 import images + from glance.api.v2 import metadef_namespaces + from glance.api.v2 import metadef_objects + from glance.api.v2 import metadef_properties + from glance.api.v2 import metadef_resource_types + from glance.api.v2 import metadef_tags + from glance.api.v2 import tasks + from glance import schema as glance_schema + + ref: str + mime_type: str = "application/json" + + if name == "TasksListResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **{ + "name": "tasks", + "type": "object", + "properties": { + "schema": {"type": "string"}, + "tasks": { + "type": "array", + "items": { + "type": "object", + "properties": copy.deepcopy( + schema_def.properties + ), + }, + }, + }, + } + ), + ) + ref = f"#/components/schemas/{name}" + elif name.startswith("Schemas") and name.endswith("Response"): + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(type="string", description="Schema data 
as string"), + ) + ref = f"#/components/schemas/{name}" + elif name == "ImagesTasksGet_Task_InfoResponse": + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + glance_schema.CollectionSchema( + "tasks", tasks.get_task_schema() + ), + name, + ), + ) + ref = f"#/components/schemas/{name}" + elif name == "ImagesImportImport_ImageRequest": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **{ + "type": "object", + "properties": { + "method": { + "type": "object", + "properties": { + "name": {"type": "string"}, + "uri": {"type": "string"}, + "glance_image_id": {"type": "string"}, + "glance_region": {"type": "string"}, + "glance_service_interface": { + "type": "string" + }, + }, + }, + "stores": { + "type": "array", + "items": {"type": "string"}, + }, + "all_stores": {"type": "boolean"}, + "all_stores_must_success": {"type": "boolean"}, + }, + } + ), + ) + ref = f"#/components/schemas/{name}" + elif name == "ImagesImportImport_ImageResponse": + openapi_spec.components.schemas.setdefault(name, TypeSchema()) + ref = f"#/components/schemas/{name}" + elif name == "ImagesListResponse": + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema(images.get_collection_schema(), name), + ) + ref = f"#/components/schemas/{name}" + elif name == "ImagesMembersListResponse": + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + image_members.get_collection_schema(), name + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "InfoImportGet_Image_ImportResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **{ + "type": "object", + "properties": { + "import-methods": { + "type": "object", + "properties": { + "description": {"type": "string"}, + "type": {"type": "string"}, + "value": { + "type": "array", + "items": {"type": "string"}, + }, + }, + } + }, + } + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + 
"InfoStoresGet_StoresResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **{ + "type": "object", + "properties": { + "stores": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "description": {"type": "string"}, + "default": {"type": "boolean"}, + }, + }, + } + }, + } + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "InfoStoresDetailGet_Stores_DetailResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **{ + "type": "object", + "properties": { + "stores": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": {"type": "string"}, + "description": {"type": "string"}, + "default": {"type": "boolean"}, + "type": {"type": "string"}, + "weight": {"type": "number"}, + "properties": { + "type": "object", + "additionalProperties": True, + }, + }, + }, + } + }, + } + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "MetadefsNamespacesListResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + metadef_namespaces.get_collection_schema(), name + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "MetadefsNamespacesObjectsListResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + metadef_objects.get_collection_schema(), name + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "MetadefsNamespacesPropertiesListResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + metadef_properties.get_collection_schema(), name + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "MetadefsResource_TypesListResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + metadef_resource_types.get_collection_schema(), name + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + 
"MetadefsNamespacesTagsListResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + self._get_glance_schema( + metadef_tags.get_collection_schema(), name + ), + ) + ref = f"#/components/schemas/{name}" + elif name == "ImageUpdateRequest": + # openapi_spec.components.schemas.setdefault( + # name, + # self._get_glance_schema( + # metadef_tags.get_collection_schema(), name + # ), + # ) + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**{"type": "string", "format": "RFC 6902"}), + ) + mime_type = "application/openstack-images-v2.1-json-patch" + ref = f"#/components/schemas/{name}" + elif name in [ + "ImagesFileUploadRequest", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**{"type": "string", "format": "binary"}), + ) + ref = f"#/components/schemas/{name}" + mime_type = "application/octet-stream" + elif name in [ + "ImagesFileDownloadResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**{"type": "string", "format": "binary"}), + ) + ref = f"#/components/schemas/{name}" + mime_type = "application/octet-stream" + elif name in [ + "ImagesFileUploadResponse", + "ImagesFileDownloadResponse", + ]: + return (None, None) + elif schema_def: + # Schema is known and is not an exception + + openapi_spec.components.schemas.setdefault( + name, self._get_glance_schema(schema_def, name) + ) + ref = f"#/components/schemas/{name}" + + else: + (ref, mime_type) = super()._get_schema_ref( + openapi_spec, name, description, schema_def=schema_def + ) + return (ref, mime_type) + + def _get_glance_schema(self, schema, name: str | None = None): + res = replace_refs(schema.raw(), proxies=False) + res.pop("definitions", None) + if "properties" in res and "type" not in res: + res["type"] = "object" + # List of image props that are by default integer, but in real life + # are surely going i64 side + i32_fixes = ["size", "virtual_size"] + if name and name == "ImagesListResponse": + for field in i32_fixes: + 
res["properties"]["images"]["items"]["properties"][field][ + "format" + ] = "int64" + if name and name == "ImageShowResponse": + for field in i32_fixes: + res["properties"][field]["format"] = "int64" + return TypeSchema(**res) + + @classmethod + def _get_response_codes(cls, method: str, operationId: str) -> list[str]: + response_codes = super()._get_response_codes(method, operationId) + if operationId == "images/image_id/file:put": + response_codes = ["204"] + if operationId == "images/image_id/file:get": + response_codes = ["200", "204", "206"] + return response_codes diff --git a/codegenerator/openapi/keystone.py b/codegenerator/openapi/keystone.py new file mode 100644 index 0000000..0c0150c --- /dev/null +++ b/codegenerator/openapi/keystone.py @@ -0,0 +1,483 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import inspect +from multiprocessing import Process +import logging +from pathlib import Path + +from ruamel.yaml.scalarstring import LiteralScalarString + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import PathSchema +from codegenerator.common.schema import SpecSchema +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.base import OpenStackServerSourceBase +from codegenerator.openapi.keystone_schemas import application_credential +from codegenerator.openapi.keystone_schemas import auth +from codegenerator.openapi.keystone_schemas import common +from codegenerator.openapi.keystone_schemas import domain +from codegenerator.openapi.keystone_schemas import endpoint +from codegenerator.openapi.keystone_schemas import federation +from codegenerator.openapi.keystone_schemas import group +from codegenerator.openapi.keystone_schemas import project +from codegenerator.openapi.keystone_schemas import region +from codegenerator.openapi.keystone_schemas import role +from codegenerator.openapi.keystone_schemas import service +from codegenerator.openapi.keystone_schemas import user +from codegenerator.openapi.utils import merge_api_ref_doc + + +class KeystoneGenerator(OpenStackServerSourceBase): + URL_TAG_MAP = { + "/versions": "version", + } + + RESOURCE_MODULES = [ + application_credential, + auth, + common, + domain, + endpoint, + federation, + group, + project, + region, + role, + service, + user, + ] + + def __init__(self): + self.api_version = "3.0" + self.min_api_version = "3.14" + + def _api_ver_major(self, ver): + return ver._ver_major + + def _api_ver_minor(self, ver): + return ver._ver_minor + + def _api_ver(self, ver): + return (ver._ver_major, ver._ver_minor) + + def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Keystone OpenAPI schema") + 
return Path(target_dir, "openapi_specs", "identity", "v3.yaml") + + def _generate(self, target_dir, args, *pargs, **kwargs): + from keystone.server.flask import application + + self.app = application.application_factory() + self.router = self.app.url_map + + work_dir = Path(target_dir) + work_dir.mkdir(parents=True, exist_ok=True) + + impl_path = Path(work_dir, "openapi_specs", "identity", "v3.yaml") + impl_path.parent.mkdir(parents=True, exist_ok=True) + + openapi_spec = self.load_openapi(impl_path) + if not openapi_spec: + openapi_spec = SpecSchema( + info=dict( + title="OpenStack Identity API", + description=LiteralScalarString( + "Identity API provided by Keystone service" + ), + version=self.api_version, + ), + openapi="3.1.0", + security=[{"ApiKeyAuth": []}], + components=dict( + securitySchemes={ + "ApiKeyAuth": { + "type": "apiKey", + "in": "header", + "name": "X-Auth-Token", + } + }, + headers={ + "X-Auth-Token": { + "description": "A valid authentication token", + "schema": {"type": "string", "format": "secret"}, + }, + "X-Subject-Token": { + "description": "A valid authentication token", + "schema": {"type": "string", "format": "secret"}, + }, + "Openstack-Auth-Receipt": { + "description": "The auth receipt. A partially successful authentication response returns the auth receipt ID in this header rather than in the response body.", + "schema": {"type": "string"}, + }, + }, + parameters={ + "X-Auth-Token": { + "in": "header", + "name": "X-Auth-Token", + "description": "A valid authentication token", + "schema": {"type": "string", "format": "secret"}, + }, + "X-Subject-Token": { + "in": "header", + "name": "X-Subject-Token", + "description": "The authentication token. 
An authentication response returns the token ID in this header rather than in the response body.", + "schema": {"type": "string", "format": "secret"}, + "required": True, + }, + }, + ), + ) + + for route in self.router.iter_rules(): + if route.rule.startswith("/static"): + continue + # if not route.rule.startswith("/v3/domains"): + # continue + if "/credentials/OS-EC2" in route.rule: + continue + + self._process_route(route, openapi_spec) + + self._sanitize_param_ver_info(openapi_spec, self.min_api_version) + + if args.api_ref_src: + merge_api_ref_doc( + openapi_spec, args.api_ref_src, allow_strip_version=False + ) + + self.dump_openapi(openapi_spec, impl_path, args.validate) + + return impl_path + + def _process_route(self, route, openapi_spec): + args = route.arguments + # ep = route.endpoint + view = self.app.view_functions[route.endpoint] + controller = None + if hasattr(view, "view_class"): + controller = view.view_class + + path = "" + path_elements = [] + operation_spec = None + tag_name = None + + for part in route.rule.split("/"): + if not part: + continue + if part.startswith("<"): + param = part.strip("<>").split(":") + path_elements.append("{" + param[-1] + "}") + else: + if not tag_name and part != "" and part != "v3": + tag_name = part + path_elements.append(part) + + if not tag_name: + tag_name = "versions" + + path = "/" + "/".join(path_elements) + if tag_name not in [x["name"] for x in openapi_spec.tags]: + openapi_spec.tags.append( + {"name": tag_name, "description": LiteralScalarString("")} + ) + # Get rid of /v3 for further processing + path_elements = path_elements[1:] + + # Build path parameters (/foo/{foo_id}/bar/{id} => $foo_id, $foo_bar_id) + # Since for same path we are here multiple times check presence of + # parameter before adding new params + path_params: list[ParameterSchema] = [] + path_resource_names: list[str] = [] + for path_element in path_elements: + if "{" in path_element: + param_name = path_element.strip("{}") + 
global_param_name = ( + "_".join(path_resource_names) + f"_{param_name}" + ) + # if global_param_name == "_project_id": + # global_param_name = "project_id" + param_ref_name = f"#/components/parameters/{global_param_name}" + # Ensure reference to the param is in the path_params + if param_ref_name not in [ + k.ref for k in [p for p in path_params] + ]: + path_params.append(ParameterSchema(ref=param_ref_name)) + # Ensure global parameter is present + path_param = ParameterSchema( + location="path", name=param_name, required=True + ) + # openapi_spec.components.parameters.setdefault(global_param_name, dict()) + if not path_param.description: + path_param.description = LiteralScalarString( + f"{param_name} parameter for {path} API" + ) + # We can only assume the param type. For path it is logically a string only + path_param.type_schema = TypeSchema(type="string") + openapi_spec.components.parameters[global_param_name] = ( + path_param + ) + else: + path_resource_names.append(path_element.replace("-", "_")) + if len(path_elements) == 0: + path_resource_names.append("root") + elif path_elements[-1].startswith("{"): + rn = path_resource_names[-1] + if rn.endswith("ies"): + rn = rn.replace("ies", "y") + else: + rn = rn.rstrip("s") + path_resource_names[-1] = rn + if path == "/v3/domains/{domain_id}/config/{group}": + path_resource_names.append("group") + elif path == "/v3/domains/config/{group}/{option}/default": + path_resource_names.append("group") + elif path == "/v3/domains/{domain_id}/config/{group}/{option}": + path_resource_names.extend(["group", "option"]) + + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + # Set operationId + if path == "/": + operation_id_prefix = "versions" + elif path == "/v3": + operation_id_prefix = "version" + else: + operation_id_prefix = "/".join( + [x.strip("{}") for x in path_elements] + ) + for method in route.methods: + if method == "OPTIONS": + # Not sure what should be done with it + 
continue + if controller: + func = getattr( + controller, method.replace("HEAD", "GET").lower(), None + ) + else: + func = view + # Set operationId + operation_id = operation_id_prefix + f":{method.lower()}" # noqa + # There is a variety of operations that make absolutely no sense and + # are just not filtered by Keystone itself + if path == "/v3/users/{user_id}/password" and method in [ + "GET", + "HEAD", + ]: + continue + + # Current Keystone code is having a bug of exposing same controller + # API for both /RESOURCE and /RESOURCE/{ID}. Routing is then + # failing to invoke the method because of missing parameter, so + # analyse and skip those now. + if not func: + continue + sig = inspect.signature(func) + for param in args: + if param not in sig.parameters: + logging.warn( + "Skipping %s:%s because controller does not support parameter %s", + path, + method, + param, + ) + func = None + break + for param in sig.parameters.values(): + if ( + param.name not in ["self"] + and param.default == param.empty + and param.name not in args + ): + # Param with no default is not a path argument + logging.warn( + "Skipping %s:%s because controller requires parameter %s not present in path", + path, + method, + param, + ) + func = None + break + + if not func: + continue + + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + doc = inspect.getdoc(func) + if not operation_spec.description: + operation_spec.description = LiteralScalarString( + doc or f"{method} operation on {path}" + ) + if tag_name and tag_name not in operation_spec.tags: + operation_spec.tags.append(tag_name) + + self.process_operation( + func, + path, + openapi_spec, + operation_spec, + path_resource_names, + method=method, + ) + + return operation_spec + + def process_operation( + self, + func, + path, + openapi_spec, + operation_spec, + path_resource_names, + *, + method=None, + ): + logging.info( + "Operation: %s [%s]", + 
path,
+            method,
+        )
+        if method in ["PUT", "POST", "PATCH"]:
+            # This is clearly a modification operation but we know nothing about request
+            schema_name = (
+                "".join([x.title() for x in path_resource_names])
+                + method.title()
+                + "Request"
+            )
+
+            (schema_ref, mime_type) = self._get_schema_ref(
+                openapi_spec,
+                schema_name,
+                description=f"Request of the {operation_spec.operationId} operation",
+            )
+
+            if schema_ref:
+                content = operation_spec.requestBody = {"content": {}}
+                content["content"][mime_type] = {
+                    "schema": {"$ref": schema_ref}
+                }
+
+        responses_spec = operation_spec.responses
+        # Errors
+        for error in ["403", "404"]:
+            responses_spec.setdefault(str(error), dict(description="Error"))
+        # Response data
+        # NOTE: single elif chain — a separate `if method == "PUT"` chain would
+        # let its `else` clobber the POST code back to "200".
+        if method == "POST":
+            response_code = "201"
+        elif method == "PUT":
+            response_code = "201"
+        elif method == "DELETE":
+            response_code = "204"
+        else:
+            response_code = "200"
+        if path == "/v3/projects/{project_id}/tags/{value}" and method in [
+            "GET",
+            "HEAD",
+        ]:
+            response_code = "204"
+        elif path in [
+            "/v3/projects/{project_id}/users/{user_id}/roles/{role_id}",
+            "/v3/domains/{project_id}/users/{user_id}/roles/{role_id}",
+        ] and method in ["GET", "HEAD", "PUT"]:
+            response_code = "204"
+        elif path in [
+            "/v3/projects/{project_id}/groups/{user_id}/roles/{role_id}",
+            "/v3/domains/{project_id}/groups/{user_id}/roles/{role_id}",
+        ] and method in ["GET", "HEAD", "PUT"]:
+            response_code = "204"
+        elif path == "/v3/users/{user_id}/password" and method == "POST":
+            response_code = "204"
+        rsp = responses_spec.setdefault(response_code, dict(description="Ok"))
+        if response_code != "204" and method not in ["DELETE", "HEAD"]:
+            # Arrange response placeholder
+            schema_name = (
+                "".join([x.title() for x in path_resource_names])
+                + method.title()
+                + "Response"
+            )
+            (schema_ref, mime_type) = self._get_schema_ref(
+                openapi_spec,
+                schema_name,
+                description=f"Response of the {operation_spec.operationId} operation",
+            )
+
+            if schema_ref:
+
rsp["content"] = {mime_type: {"schema": {"$ref": schema_ref}}} + + if path == "/v3/auth/tokens": + rsp_headers = rsp.setdefault("headers", {}) + if method == "POST": + openapi_spec.components.headers["X-Subject-Token"] = { + "description": "API Authorization token", + "schema": {"type": "string"}, + } + rsp_headers.setdefault( + "X-Subject-Token", + {"$ref": "#/components/headers/X-Subject-Token"}, + ) + operation_spec.security = [] + elif method == "GET": + operation_spec.parameters.append( + ParameterSchema( + ref="#/components/parameters/X-Subject-Token" + ) + ) + rsp_headers.setdefault( + "X-Subject-Token", + {"$ref": "#/components/headers/X-Subject-Token"}, + ) + + self._post_process_operation_hook( + openapi_spec, operation_spec, path=path + ) + + def _post_process_operation_hook( + self, openapi_spec, operation_spec, path: str | None = None + ): + """Hook to allow service specific generator to modify details""" + for resource_mod in self.RESOURCE_MODULES: + hook = getattr(resource_mod, "_post_process_operation_hook", None) + if hook: + hook(openapi_spec, operation_spec, path=path) + + def _get_schema_ref( + self, + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, + ): + # Invoke modularized schema _get_schema_ref + for resource_mod in self.RESOURCE_MODULES: + hook = getattr(resource_mod, "_get_schema_ref", None) + if hook: + (ref, mime_type, matched) = hook( + openapi_spec, name, description, schema_def, action_name + ) + if matched: + return (ref, mime_type) + + # Default + (ref, mime_type) = super()._get_schema_ref( + openapi_spec, name, description, action_name=action_name + ) + + return (ref, mime_type) diff --git a/codegenerator/openapi/keystone_schemas/__init__.py b/codegenerator/openapi/keystone_schemas/__init__.py new file mode 100644 index 0000000..1e280c4 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/__init__.py @@ -0,0 +1,11 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may 
+# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. diff --git a/codegenerator/openapi/keystone_schemas/application_credential.py b/codegenerator/openapi/keystone_schemas/application_credential.py new file mode 100644 index 0000000..2383ce3 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/application_credential.py @@ -0,0 +1,183 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import copy + +from typing import Any + +from keystone.application_credential import ( + schema as application_credential_schema, +) + +from codegenerator.common.schema import TypeSchema +from codegenerator.common.schema import ParameterSchema +from codegenerator.openapi.keystone_schemas import common + +# Application Credentials +APPLICATION_CREDENTIAL_ACCESS_RULES_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "access_rules": copy.deepcopy( + application_credential_schema._access_rules_properties + ), + "links": copy.deepcopy(common.LINKS_SCHEMA), + }, +} + +APPLICATION_CREDENTIAL_ACCESS_RULE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "access_rule": copy.deepcopy( + application_credential_schema._access_rules_properties["items"] + ), + }, +} + +APPLICATION_CREDENTIAL_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The ID of the application credential.", + }, + "project_id": { + "type": "string", + "format": "uuid", + "description": "The ID of the project the application credential was created for and that authentication requests using this application credential will be scoped to.", + }, + **application_credential_schema._application_credential_properties, + }, +} +APPLICATION_CREDENTIAL_SCHEMA["properties"].pop("secret", None) + +APPLICATION_CREDENTIAL_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "application_credential": copy.deepcopy(APPLICATION_CREDENTIAL_SCHEMA) + }, +} + +APPLICATION_CREDENTIAL_CREATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "application_credential": copy.deepcopy( + application_credential_schema.application_credential_create + ) + }, +} + +APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "application_credential": copy.deepcopy(APPLICATION_CREDENTIAL_SCHEMA) + }, +} +# Update `secret` field 
+APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA["properties"][ + "application_credential" +]["properties"]["secret"] = { + "type": "string", + "description": "The secret for the application credential, either generated by the server or provided by the user. This is only ever shown once in the response to a create request. It is not stored nor ever shown again. If the secret is lost, a new application credential must be created.", +} + +APPLICATION_CREDENTIALS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "application_credentials": { + "type": "array", + "items": copy.deepcopy(APPLICATION_CREDENTIAL_SCHEMA), + }, + }, +} + +APPLICATION_CREDENTIALS_LIST_PARAMETERS = { + "application_credentials_name": { + "in": "query", + "name": "name", + "description": "The name of the application credential. Must be unique to a user.", + "schema": {"type": "string"}, + }, +} + + +def _post_process_operation_hook( + openapi_spec, operation_spec, path: str | None = None +): + """Hook to allow service specific generator to modify details""" + operationId = operation_spec.operationId + if operationId == "users/user_id/application_credentials:get": + for ( + key, + val, + ) in APPLICATION_CREDENTIALS_LIST_PARAMETERS.items(): + openapi_spec.components.parameters.setdefault( + key, ParameterSchema(**val) + ) + ref = f"#/components/parameters/{key}" + if ref not in [x.ref for x in operation_spec.parameters]: + operation_spec.parameters.append(ParameterSchema(ref=ref)) + + +def _get_schema_ref( + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, +) -> tuple[str | None, str | None, bool]: + mime_type: str = "application/json" + ref: str + # ### Application Credentials + if name == "UsersAccess_RuleGetResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**APPLICATION_CREDENTIAL_ACCESS_RULE_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "UsersAccess_RulesGetResponse": + 
openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**APPLICATION_CREDENTIAL_ACCESS_RULES_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "UsersApplication_CredentialsGetResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**APPLICATION_CREDENTIALS_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "UsersApplication_CredentialGetResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**APPLICATION_CREDENTIAL_CONTAINER_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "UsersApplication_CredentialsPostRequest": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**APPLICATION_CREDENTIAL_CREATE_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "UsersApplication_CredentialsPostResponse": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + + else: + return (None, None, False) + + return (ref, mime_type, True) diff --git a/codegenerator/openapi/keystone_schemas/auth.py b/codegenerator/openapi/keystone_schemas/auth.py new file mode 100644 index 0000000..64e45b9 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/auth.py @@ -0,0 +1,634 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License.
+# +import copy + +from typing import Any + +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.keystone_schemas import common + + +SCOPE_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The authorization scope, including the system (Since v3.10), a project, or a domain (Since v3.4). If multiple scopes are specified in the same request (e.g. project and domain or domain and system) an HTTP 400 Bad Request will be returned, as a token cannot be simultaneously scoped to multiple authorization targets. An ID is sufficient to uniquely identify a project but if a project is specified by name, then the domain of the project must also be specified in order to uniquely identify the project by name. A domain scope may be specified by either the domain’s ID or name with equivalent results.", + "properties": { + "project": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "Project Name", + }, + "id": { + "type": "string", + "description": "Project Id", + }, + "domain": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Project domain Id", + }, + "name": { + "type": "string", + "description": "Project domain name", + }, + }, + }, + }, + }, + "domain": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "Domain id", + }, + "name": { + "type": "string", + "description": "Domain name", + }, + }, + }, + "OS-TRUST:trust": { + "type": "object", + "properties": { + "id": { + "type": "string", + }, + }, + }, + "system": { + "type": "object", + "properties": { + "all": {"type": "boolean"}, + }, + }, + }, +} + + +AUTH_TOKEN_ISSUE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "auth": { + "type": "object", + "description": "An auth object.", + "properties": { + "identity": { + "type": "object", + "description": "An identity object.", + "properties": { + "methods": { + "type": "array", + "description": "The 
authentication method.", + "items": { + "type": "string", + "enum": [ + "password", + "token", + "totp", + "application_credential", + ], + }, + }, + "password": { + "type": "object", + "description": "The password object, contains the authentication information.", + "properties": { + "user": { + "type": "object", + "description": "A `user` object", + "properties": { + "id": { + "type": "string", + "description": "User ID", + }, + "name": { + "type": "string", + "description": "User Name", + }, + "password": { + "type": "string", + "format": "password", + "description": "User Password", + }, + "domain": { + "type": "object", + "description": "User Domain object", + "properties": { + "id": { + "type": "string", + "description": "User Domain ID", + }, + "name": { + "type": "string", + "description": "User Domain Name", + }, + }, + }, + }, + }, + }, + }, + "token": { + "type": "object", + "description": "A `token` object", + "properties": { + "id": { + "type": "string", + "format": "password", + "description": "Authorization Token value", + }, + }, + "required": [ + "id", + ], + }, + "totp": { + "type": "object", + "description": "Multi Factor Authentication information", + "properties": { + "user": { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The user ID", + }, + "name": { + "type": "string", + "description": "The user name", + }, + "domain": { + "type": "object", + "properties": { + "id": { + "type": "string", + }, + "name": { + "type": "string", + }, + }, + }, + "passcode": { + "type": "string", + "format": "password", + "description": "MFA passcode", + }, + }, + "required": ["passcode"], + }, + }, + "required": [ + "user", + ], + }, + "application_credential": { + "type": "object", + "description": "An application credential object.", + "properties": { + "id": { + "type": "string", + "descripion": "The ID of the application credential used for authentication. 
If not provided, the application credential must be identified by its name and its owning user.", + }, + "name": { + "type": "string", + "description": "The name of the application credential used for authentication. If provided, must be accompanied by a user object.", + }, + "secret": { + "type": "string", + "format": "password", + "description": "The secret for authenticating the application credential.", + }, + "user": { + "type": "object", + "description": "A user object, required if an application credential is identified by name and not ID.", + "properties": { + "id": { + "type": "string", + "description": "The user ID", + }, + "name": { + "type": "string", + "description": "The user name", + }, + "domain": { + "type": "object", + "properties": { + "id": { + "type": "string", + }, + "name": { + "type": "string", + }, + }, + }, + }, + }, + }, + "required": ["secret"], + }, + }, + "required": [ + "methods", + ], + }, + "scope": SCOPE_SCHEMA, + }, + "required": [ + "identity", + ], + }, + }, +} + +AUTH_PROJECTS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "projects": { + "type": "array", + "items": { + "type": "object", + "properties": { + "domain_id": { + "type": "string", + "format": "uuid", + "description": "The ID of the domain for the project.", + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The ID of the project.", + }, + "name": { + "type": "string", + "description": "The name of the project", + }, + "enabled": { + "type": "boolean", + "description": "If set to true, project is enabled. 
If set to false, project is disabled.", + }, + "links": copy.deepcopy(common.LINKS_SCHEMA), + }, + }, + }, + "links": copy.deepcopy(common.LINKS_SCHEMA), + }, +} + +AUTH_DOMAINS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "domains": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The ID of the domain.", + }, + "name": { + "type": "string", + "description": "The name of the domain", + }, + "description": { + "type": "string", + "description": "The description of the domain.", + }, + "enabled": { + "type": "boolean", + "description": "If set to true, domain is enabled. If set to false, domain is disabled.", + }, + "links": copy.deepcopy(common.LINKS_SCHEMA), + }, + }, + }, + "links": copy.deepcopy(common.LINKS_SCHEMA), + }, +} + +AUTH_SYSTEMS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "system": { + "type": "array", + "description": "A list of systems to access based on role assignments.", + "items": { + "type": "object", + "additionalProperties": {"type": "boolean"}, + }, + } + }, +} + +AUTH_CATALOG_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "catalog": { + "type": "array", + "items": { + "type": "object", + "properties": { + "endpoints": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The endpoint UUID", + }, + "interface": { + "type": "string", + "enum": ["public", "internal", "admin"], + }, + "region": { + "type": "string", + "description": "Region name of the endpoint", + }, + "url": { + "type": "string", + "format": "uri", + "description": "The endpoint url", + }, + }, + }, + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the service to which the endpoint belongs.", + }, + "type": { + "type": "string", + "description": "The service type, which describes the API implemented by 
the service", + }, + "name": { + "type": "string", + "description": "The service name.", + }, + }, + }, + } + }, +} + +AUTH_USER_INFO_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A user object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A user UUID", + }, + "name": {"type": "string", "description": "A user name"}, + "domain": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A user domain UUID", + }, + "name": { + "type": "string", + "description": "A user domain name", + }, + }, + }, + "password_expires_at": { + "type": "string", + "format": "date-time", + "description": "DateTime of the user password expiration", + }, + "OS-FEDERATION": {"type": "object"}, + }, +} + +AUTH_TOKEN_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "token": { + "type": "object", + "properties": { + "audit_ids": { + "type": "array", + "description": "A list of one or two audit IDs. An audit ID is a unique, randomly generated, URL-safe string that you can use to track a token. The first audit ID is the current audit ID for the token. The second audit ID is present for only re-scoped tokens and is the audit ID from the token before it was re-scoped. A re- scoped token is one that was exchanged for another token of the same or different scope. 
You can use these audit IDs to track the use of a token or chain of tokens across multiple requests and endpoints without exposing the token ID to non-privileged users.", + "items": {"type": "string"}, + }, + "catalog": { + "description": "A catalog object.", + **AUTH_CATALOG_SCHEMA["properties"]["catalog"], + }, + "expires_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the token expires.", + }, + "issued_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the token was issued.", + }, + "methods": { + "type": "array", + "description": "The authentication methods, which are commonly password, token, or other methods. Indicates the accumulated set of authentication methods that were used to obtain the token. For example, if the token was obtained by password authentication, it contains password. Later, if the token is exchanged by using the token authentication method one or more times, the subsequently created tokens contain both password and token in their methods attribute. Unlike multi-factor authentication, the methods attribute merely indicates the methods that were used to authenticate the user in exchange for a token. The client is responsible for determining the total number of authentication factors.", + "items": {"type": "string"}, + }, + "user": copy.deepcopy(AUTH_USER_INFO_SCHEMA), + }, + } + }, +} + +AUTH_SCOPED_TOKEN_SCHEMA: dict[str, Any] = copy.deepcopy(AUTH_TOKEN_SCHEMA) +AUTH_SCOPED_TOKEN_SCHEMA["properties"]["token"]["properties"].update( + **{ + "is_domain": { + "type": "boolean", + }, + "domain": { + "type": "object", + "description": "A domain object including the id and name representing the domain the token is scoped to. 
This is only included in tokens that are scoped to a domain.", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A domain UUID", + }, + "name": { + "type": "string", + "description": "A domain name", + }, + }, + }, + "project": { + "type": "object", + "description": "A project object including the id, name and domain object representing the project the token is scoped to. This is only included in tokens that are scoped to a project.", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A user domain UUID", + }, + "name": { + "type": "string", + "description": "A user domain name", + }, + }, + }, + "roles": { + "type": "array", + "description": "A list of role objects", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A role UUID", + }, + "name": { + "type": "string", + "description": "A role name", + }, + }, + }, + }, + "system": { + "type": "object", + "description": 'A system object containing information about which parts of the system the token is scoped to. If the token is scoped to the entire deployment system, the system object will consist of {"all": true}. This is only included in tokens that are scoped to the system.', + "additionalProperties": {"type": "boolean"}, + }, + } +) + +AUTH_RECEIPT_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "receipt": { + "type": "object", + "properties": { + "expires_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the token expires.", + }, + "issued_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the token was issued.", + }, + "methods": { + "type": "array", + "description": "The authentication methods, which are commonly password, token, or other methods. Indicates the accumulated set of authentication methods that were used to obtain the token. 
For example, if the token was obtained by password authentication, it contains password. Later, if the token is exchanged by using the token authentication method one or more times, the subsequently created tokens contain both password and token in their methods attribute. Unlike multi-factor authentication, the methods attribute merely indicates the methods that were used to authenticate the user in exchange for a token. The client is responsible for determining the total number of authentication factors.", + "items": {"type": "string"}, + }, + "user": copy.deepcopy(AUTH_USER_INFO_SCHEMA), + }, + }, + "required_auth_methods": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of authentication rules that may be used with the auth receipt to complete the authentication process.", + }, + }, +} + + +def _post_process_operation_hook( + openapi_spec, operation_spec, path: str | None = None +): + """Hook to allow service specific generator to modify details""" + operationId = operation_spec.operationId + + if operationId == "auth/tokens:post": + (receipt_schema_ref, receipt_mime_type, matched) = _get_schema_ref( + openapi_spec, "AuthReceiptSchema" + ) + operation_spec.responses["401"] = { + "description": "Unauthorized", + "headers": { + "Openstack-Auth-Receipt": { + "$ref": "#/components/headers/Openstack-Auth-Receipt" + } + }, + "content": { + receipt_mime_type: {"schema": {"$ref": receipt_schema_ref}} + }, + } + + +def _get_schema_ref( + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, +) -> tuple[str | None, str | None, bool]: + mime_type: str = "application/json" + ref: str + + # Auth + if name == "AuthTokensPostRequest": + openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**AUTH_TOKEN_ISSUE_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in ["AuthTokensGetResponse", "AuthTokensPostResponse"]: + openapi_spec.components.schemas.setdefault( + name, + 
TypeSchema(**AUTH_SCOPED_TOKEN_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "AuthReceiptSchema": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**AUTH_RECEIPT_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "AuthProjectsGetResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**AUTH_PROJECTS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "AuthDomainsGetResponse", + ]: + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**AUTH_DOMAINS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "AuthSystemGetResponse": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**AUTH_SYSTEMS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "AuthCatalogGetResponse": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**AUTH_CATALOG_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + + else: + return (None, None, False) + + return (ref, mime_type, True) diff --git a/codegenerator/openapi/keystone_schemas/common.py b/codegenerator/openapi/keystone_schemas/common.py new file mode 100644 index 0000000..1c86ca9 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/common.py @@ -0,0 +1,43 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import copy + +from typing import Any + +from keystone.resource import schema as ks_schema + + +LINK_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Links to the resources in question. See [API Guide / Links and References](https://docs.openstack.org/api-guide/compute/links_and_references.html) for more info.", + "properties": { + "href": {"type": "string", "format": "uri"}, + "rel": {"type": "string"}, + }, +} + +LINKS_SCHEMA: dict[str, Any] = { + "type": "array", + "description": "Links to the resources in question. See [API Guide / Links and References](https://docs.openstack.org/api-guide/compute/links_and_references.html) for more info.", + "items": copy.deepcopy(LINK_SCHEMA), +} + + +TAG_SCHEMA: dict[str, Any] = copy.deepcopy( + ks_schema._project_tag_name_properties +) + +TAGS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"tags": ks_schema._project_tags_list_properties}, +} diff --git a/codegenerator/openapi/keystone_schemas/domain.py b/codegenerator/openapi/keystone_schemas/domain.py new file mode 100644 index 0000000..14e6ea0 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/domain.py @@ -0,0 +1,165 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +from typing import Any + +from keystone.resource import schema as ks_schema + +from codegenerator.common.schema import TypeSchema + + +DOMAIN_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid", "readOnly": True}, + **ks_schema._domain_properties, + }, + "additionalProperties": True, +} + +DOMAINS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"domains": {"type": "array", "items": DOMAIN_SCHEMA}}, +} + + +DOMAIN_CONFIG_GROUP_LDAP = { + "type": "object", + "description": "An ldap object. Required to set the LDAP group configuration options.", + "properties": { + "url": { + "type": "string", + "format": "uri", + "description": "The LDAP URL.", + }, + "user_tree_dn": { + "type": "string", + "description": "The base distinguished name (DN) of LDAP, from where all users can be reached. For example, ou=Users,dc=root,dc=org.", + }, + }, + "additionalProperties": True, +} + +DOMAIN_CONFIG_GROUP_IDENTITY = { + "type": "object", + "description": "An identity object.", + "properties": { + "driver": { + "type": "string", + "description": "The Identity backend driver.", + }, + }, + "additionalProperties": True, +} + +DOMAIN_CONFIGS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "config": { + "type": "object", + "description": "A config object.", + "properties": { + "identity": DOMAIN_CONFIG_GROUP_IDENTITY, + "ldap": DOMAIN_CONFIG_GROUP_LDAP, + }, + } + }, +} + +DOMAIN_CONFIG_SCHEMA: dict[str, Any] = { + "oneOf": [ + DOMAIN_CONFIG_GROUP_IDENTITY, + DOMAIN_CONFIG_GROUP_LDAP, + ] +} + + +def _post_process_operation_hook( + openapi_spec, operation_spec, path: str | None = None +): + """Hook to allow service specific generator to modify details""" + pass + + +def _get_schema_ref( + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, +) -> tuple[str | None, str | None, bool]: + mime_type: str = "application/json" + ref: str + # Domains + if name in [ + 
"DomainsPostResponse", + "DomainGetResponse", + "DomainPatchResponse", + ]: + openapi_spec.components.schemas.setdefault( + "Domain", TypeSchema(**DOMAIN_SCHEMA) + ) + ref = "#/components/schemas/Domain" + elif name == "DomainsPostRequest": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**ks_schema.domain_create) + ) + ref = f"#/components/schemas/{name}" + elif name == "DomainPatchRequest": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**ks_schema.domain_update) + ) + ref = f"#/components/schemas/{name}" + elif name == "DomainsGetResponse": + openapi_spec.components.schemas.setdefault( + name, TypeSchema(**DOMAINS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + + # Domain Config + elif name in [ + "DomainsConfigDefaultGetResponse", + "DomainsConfigGetResponse", + "DomainsConfigPutRequest", + "DomainsConfigPutResponse", + "DomainsConfigPatchResponse", + "DomainsConfigPatchRequest", + "DomainsConfigPatchResponse", + "DomainsConfigDefaultGetResponse", + ]: + openapi_spec.components.schemas.setdefault( + "DomainConfig", + TypeSchema(**DOMAIN_CONFIGS_SCHEMA), + ) + ref = "#/components/schemas/DomainConfig" + elif name in [ + "DomainsConfigGroupGetResponse", + "DomainsConfigGroupPatchRequest", + "DomainsConfigGroupPatchResponse", + "DomainsConfigGroupPatchResponse", + "DomainsConfigGroupPatchResponse", + "DomainsConfigDefaultGroupGetResponse", + "DomainsConfigGroupOptionPatchResponse", + "DomainsConfigGroupOptionGetResponse", + "DomainsConfigGroupOptionPatchRequest", + ]: + openapi_spec.components.schemas.setdefault( + "DomainConfigGroup", + TypeSchema(**DOMAIN_CONFIG_SCHEMA), + ) + ref = "#/components/schemas/DomainConfigGroup" + + else: + return (None, None, False) + + return (ref, mime_type, True) diff --git a/codegenerator/openapi/keystone_schemas/endpoint.py b/codegenerator/openapi/keystone_schemas/endpoint.py new file mode 100644 index 0000000..c2024f0 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/endpoint.py 
@@ -0,0 +1,159 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import copy + +from typing import Any + +from codegenerator.common.schema import TypeSchema +from codegenerator.common.schema import ParameterSchema + +ENDPOINT_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "enabled": { + "type": "boolean", + "description": "Defines whether the service and its endpoints appear in the service catalog.", + }, + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the service to which the endpoint belongs.", + "readOnly": True, + }, + "interface": { + "type": "string", + "enum": ["internal", "admin", "public"], + "description": "The interface type, which describes the visibility of the Value is: - public. Visible by end users on a publicly available network interface. - internal. Visible by end users on an unmetered internal network interface. - admin. 
Visible by administrative users on a secure network interface.", + }, + "region": { + "type": "string", + "description": "The geographic location of the service endpoint.", + "x-openstack": {"max-ver": "3.2"}, + }, + "region_id": { + "type": "string", + "format": "uuid", + "description": "The geographic location of the service ", + "x-openstack": {"min-ver": "3.2"}, + }, + "service_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the service to which the endpoint belongs.", + }, + "url": { + "type": "string", + "format": "uri", + "description": "The endpoint URL.", + }, + }, +} + +ENDPOINT_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"endpoint": ENDPOINT_SCHEMA}, +} + +ENDPOINTS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"endpoints": {"type": "array", "items": ENDPOINT_SCHEMA}}, +} + +ENDPOINTS_LIST_PARAMETERS = { + "endpoint_service_id": { + "in": "query", + "name": "service_id", + "description": "Filters the response by a service ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "endpoint_region_id": { + "in": "query", + "name": "region", + "description": "Filters the response by a region ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "endpoint_interface": { + "in": "query", + "name": "interface", + "description": "Filters the response by an interface.", + "schema": {"type": "string", "enum": ["public", "internal", "admin"]}, + }, +} + +ENDPOINT_CREATE_SCHEMA: dict[str, Any] = copy.deepcopy( + ENDPOINT_CONTAINER_SCHEMA +) +ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["properties"].pop("id") +ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["required"] = [ + "interface", + "service_id", + "url", +] + + +def _post_process_operation_hook( + openapi_spec, operation_spec, path: str | None = None +): + """Hook to allow service specific generator to modify details""" + operationId = operation_spec.operationId + if operationId == "endpoints:get": + for ( + key, 
+                val, +            ) in ENDPOINTS_LIST_PARAMETERS.items(): +                openapi_spec.components.parameters.setdefault( +                    key, ParameterSchema(**val) +                ) +                ref = f"#/components/parameters/{key}" +                if ref not in [x.ref for x in operation_spec.parameters]: +                    operation_spec.parameters.append(ParameterSchema(ref=ref)) + + +def _get_schema_ref( +    openapi_spec, +    name, +    description=None, +    schema_def=None, +    action_name=None, +) -> tuple[str | None, str | None, bool]: +    mime_type: str = "application/json" +    ref: str +    # ### Endpoints +    if name == "EndpointsGetResponse": +        openapi_spec.components.schemas.setdefault( +            name, +            TypeSchema(**ENDPOINTS_SCHEMA), +        ) +        ref = f"#/components/schemas/{name}" +    elif name in [ +        "EndpointGetResponse", +        # "EndpointsPostRequest" handled below with ENDPOINT_CREATE_SCHEMA +        "EndpointsPostResponse", +        "EndpointPatchResponse", +    ]: +        openapi_spec.components.schemas.setdefault( +            "Endpoint", +            TypeSchema(**ENDPOINT_CONTAINER_SCHEMA), +        ) +        ref = "#/components/schemas/Endpoint" +    elif name == "EndpointsPostRequest": +        openapi_spec.components.schemas.setdefault( +            name, +            TypeSchema(**ENDPOINT_CREATE_SCHEMA), +        ) +        ref = f"#/components/schemas/{name}" + +    else: +        return (None, None, False) + +    return (ref, mime_type, True) diff --git a/codegenerator/openapi/keystone_schemas/federation.py b/codegenerator/openapi/keystone_schemas/federation.py new file mode 100644 index 0000000..463a337 --- /dev/null +++ b/codegenerator/openapi/keystone_schemas/federation.py @@ -0,0 +1,447 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +# +from typing import Any + +from jsonref import replace_refs + +from keystone.federation import schema as federation_schema +from keystone.federation import utils as federation_mapping_schema + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.keystone_schemas import auth + + +IDENTITY_PROVIDER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The Identity Provider unique ID", + }, + "description": { + "type": "string", + "description": "The Identity Provider description", + }, + "domain_id": { + "type": "string", + "format": "uuid", + "description": "The ID of a domain that is associated with the Identity Provider.", + }, + "authorization_ttl": { + "type": "integer", + "description": "The length of validity in minutes for group memberships carried over through mapping and persisted in the database.", + }, + "enabled": { + "type": "boolean", + "description": "Whether the Identity Provider is enabled or not", + }, + "remote_ids": { + "type": "array", + "description": "List of the unique Identity Provider’s remote IDs", + "items": {"type": "string"}, + }, + }, +} + +IDENTITY_PROVIDER_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"identity_provider": IDENTITY_PROVIDER_SCHEMA}, +} + +IDENTITY_PROVIDER_CREATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "identity_provider": federation_schema.identity_provider_create + }, +} + +IDENTITY_PROVIDER_UPDATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "identity_provider": federation_schema.identity_provider_update + }, +} + +IDENTITY_PROVIDERS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "identity_providers": { + "type": "array", + "items": IDENTITY_PROVIDER_SCHEMA, + } + }, +} + 
+IDENTITY_PROVIDERS_LIST_PARAMETERS: dict[str, Any] = { + "idp_id": { + "in": "query", + "name": "id", + "description": "Filter for Identity Providers’ ID attribute", + "schema": {"type": "string"}, + }, + "idp_enabled": { + "in": "query", + "name": "enabled", + "description": "Filter for Identity Providers’ enabled attribute", + "schema": {"type": "boolean"}, + }, +} + +IDENTITY_PROVIDER_PROTOCOL_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The federation protocol ID", + }, + "mapping_id": {"type": "string"}, + "remote_id_attribute": {"type": "string", "maxLength": 64}, + }, +} + +IDENTITY_PROVIDER_PROTOCOL_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"protocol": IDENTITY_PROVIDER_PROTOCOL_SCHEMA}, +} + +IDENTITY_PROVIDER_PROTOCOLS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "protocols": { + "type": "array", + "items": IDENTITY_PROVIDER_PROTOCOL_SCHEMA, + } + }, +} + +IDENTITY_PROVIDER_PROTOCOL_CREATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"protocol": federation_schema.protocol_create}, +} + +IDENTITY_PROVIDER_PROTOCOL_UPDATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"protocol": federation_schema.protocol_update}, +} + +MAPPING_PROPERTIES = replace_refs( + federation_mapping_schema.MAPPING_SCHEMA, proxies=False +) +MAPPING_PROPERTIES.pop("definitions", None) +MAPPING_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": { + "type": "string", + "description": "The Federation Mapping unique ID", + }, + **MAPPING_PROPERTIES["properties"], + }, +} + +MAPPING_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"mapping": MAPPING_SCHEMA}, +} + +MAPPINGS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"mappings": {"type": "array", "items": MAPPING_SCHEMA}}, +} + +MAPPING_CREATE_SCHEMA: dict[str, Any] = { + "type": "object", + 
"properties": {"mapping": MAPPING_PROPERTIES}, +} + +FEDERATION_SERVICE_PROVIDER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "auth_url": { + "type": "string", + "description": "The URL to authenticate against", + }, + "description": { + "type": ["string", "null"], + "description": "The description of the Service Provider", + }, + "id": { + "type": "string", + "description": "The Service Provider unique ID", + }, + "enabled": { + "type": "boolean", + "description": "Whether the Service Provider is enabled or not", + }, + "relay_state_prefix": { + "type": ["string", "null"], + "description": "The prefix of the RelayState SAML attribute", + }, + "sp_url": { + "type": "string", + "description": "The Service Provider’s URL", + }, + }, + "required": ["auth_url", "sp_url"], +} + +FEDERATION_SERVICE_PROVIDER_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"service_provider": FEDERATION_SERVICE_PROVIDER_SCHEMA}, +} + +FEDERATION_SERVICE_PROVIDERS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "service_providers": { + "type": "array", + "items": FEDERATION_SERVICE_PROVIDER_SCHEMA, + } + }, +} + +FEDERATION_SERVICE_PROVIDER_CREATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "service_provider": federation_schema.service_provider_create + }, +} + +FEDERATION_SERVICE_PROVIDER_UPDATE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "service_provider": federation_schema.service_provider_update + }, +} + + +def _post_process_operation_hook( + openapi_spec, operation_spec, path: str | None = None +): + """Hook to allow service specific generator to modify details""" + operationId = operation_spec.operationId + if operationId == "OS-FEDERATION/identity_providers:get": + for ( + key, + val, + ) in IDENTITY_PROVIDERS_LIST_PARAMETERS.items(): + openapi_spec.components.parameters.setdefault( + key, ParameterSchema(**val) + ) + ref = f"#/components/parameters/{key}" + if ref not 
def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Resolve a federation operation schema name to a schema reference.

    Registers the matching schema under ``openapi_spec.components.schemas``
    (if not already present) and returns a ``(ref, mime_type, matched)``
    triple:

    * ``ref`` -- JSON pointer to the registered schema, or ``None`` when
      the operation carries no describable body (e.g. WebSSO POST requests);
    * ``mime_type`` -- payload content type (``text/xml`` for SAML
      assertions and IdP metadata, otherwise ``application/json``);
    * ``matched`` -- ``False`` when the name is not handled by this module
      so the caller can fall through to generic handling.
    """
    mime_type: str = "application/json"
    ref: str | None
    if name == "Os_FederationProjectsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**auth.AUTH_PROJECTS_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "Os_FederationDomainsGetResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**auth.AUTH_DOMAINS_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "AuthOs_FederationSaml2PostRequest",
        "AuthOs_FederationSaml2EcpPostRequest",
    ]:
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**auth.AUTH_TOKEN_ISSUE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "AuthOs_FederationSaml2PostResponse",
        "AuthOs_FederationSaml2EcpPostResponse",
    ]:
        mime_type = "text/xml"
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(
                type="string",
                format="xml",
                # fix: keyword was misspelled "descripion", so the text
                # never reached the generated schema
                description="SAML assertion in XML format",
            ),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "AuthOs_FederationWebssoGetResponse",
        "AuthOs_FederationWebssoPostResponse",
        "AuthOs_FederationIdentity_ProvidersProtocolsWebssoGetResponse",
        "AuthOs_FederationIdentity_ProvidersProtocolsWebssoPostResponse",
        "Os_FederationIdentity_ProvidersProtocolsAuthGetResponse",
        "Os_FederationIdentity_ProvidersProtocolsAuthPostResponse",
    ]:
        # Federation based auth returns unscoped token even it is not
        # described explicitly in apiref
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**auth.AUTH_TOKEN_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "AuthOs_FederationWebssoPostRequest",
        "AuthOs_FederationIdentity_ProvidersProtocolsWebssoPostRequest",
    ]:
        # Request body is free-form / not described.
        ref = None
    # ### Identity provider
    elif name == "Os_FederationIdentity_ProvidersGetResponse":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDERS_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "Os_FederationIdentity_ProviderGetResponse",
        "Os_FederationIdentity_ProviderPutResponse",
        "Os_FederationIdentity_ProviderPatchResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_CONTAINER_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name == "Os_FederationIdentity_ProviderPutRequest":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_CREATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name == "Os_FederationIdentity_ProviderPatchRequest":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_UPDATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    # ### Identity provider protocols
    elif name == "Os_FederationIdentity_ProvidersProtocolsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_PROTOCOLS_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "Os_FederationIdentity_ProvidersProtocolGetResponse",
        "Os_FederationIdentity_ProvidersProtocolPutResponse",
        "Os_FederationIdentity_ProvidersProtocolPatchResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_CONTAINER_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name == "Os_FederationIdentity_ProvidersProtocolPutRequest":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_CREATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name == "Os_FederationIdentity_ProvidersProtocolPatchRequest":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_UPDATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    # ### Identity provider mapping
    elif name == "Os_FederationMappingsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**MAPPINGS_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "Os_FederationMappingGetResponse",
        "Os_FederationMappingPutResponse",
        "Os_FederationMappingPatchResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**MAPPING_CONTAINER_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "Os_FederationMappingPutRequest",
        "Os_FederationMappingPatchRequest",
    ]:
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**MAPPING_CREATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    # ### Identity provider service provider
    elif name == "Os_FederationService_ProvidersGetResponse":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**FEDERATION_SERVICE_PROVIDERS_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "Os_FederationService_ProviderGetResponse",
        "Os_FederationService_ProviderPutResponse",
        "Os_FederationService_ProviderPatchResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**FEDERATION_SERVICE_PROVIDER_CONTAINER_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name == "Os_FederationService_ProviderPutRequest":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**FEDERATION_SERVICE_PROVIDER_CREATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name == "Os_FederationService_ProviderPatchRequest":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**FEDERATION_SERVICE_PROVIDER_UPDATE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    # SAML2 Metadata
    elif name == "Os_FederationSaml2MetadataGetResponse":
        mime_type = "text/xml"
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(
                type="string",
                format="xml",
                # fix: keyword was misspelled "descripion"
                description="Identity Provider metadata information in XML format",
            ),
        )
        ref = f"#/components/schemas/{name}"
    else:
        return (None, None, False)

    return (ref, mime_type, True)
+# +from typing import Any + +from keystone.identity import schema as identity_schema + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.keystone_schemas import user + + +GROUP_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid", "readOnly": True}, + **identity_schema._group_properties, + }, +} + +GROUP_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"group": GROUP_SCHEMA}, +} + +GROUPS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"groups": {"type": "array", "items": GROUP_SCHEMA}}, +} + +GROUPS_LIST_PARAMETERS: dict[str, Any] = { + "group_domain_id": { + "in": "query", + "name": "domain_id", + "description": "Filters the response by a domain ID.", + "schema": {"type": "string", "format": "uuid"}, + }, +} + +GROUP_USERS_LIST_PARAMETERS: dict[str, Any] = { + "group_user_password_expires_at": { + "in": "query", + "name": "password_expires_at", + "description": "Filter results based on which user passwords have expired. 
def _post_process_operation_hook(
    openapi_spec, operation_spec, path: str | None = None
):
    """Attach group-specific query parameters to list operations."""
    operation_id = operation_spec.operationId

    if operation_id == "groups:get":
        extra_params = GROUPS_LIST_PARAMETERS
    elif operation_id == "groups/group_id/users:get":
        extra_params = GROUP_USERS_LIST_PARAMETERS
    else:
        return

    # Register each parameter globally once, then reference it from the
    # operation (skipping refs the operation already carries).
    known_refs = {param.ref for param in operation_spec.parameters}
    for key, definition in extra_params.items():
        openapi_spec.components.parameters.setdefault(
            key, ParameterSchema(**definition)
        )
        param_ref = f"#/components/parameters/{key}"
        if param_ref not in known_refs:
            operation_spec.parameters.append(ParameterSchema(ref=param_ref))
            known_refs.add(param_ref)


def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Resolve a group operation schema name to a ``(ref, mime, matched)`` triple.

    Registers the schema in ``openapi_spec.components.schemas`` on first use.
    ``matched`` is ``False`` for names this module does not handle.
    """
    json_mime = "application/json"
    # Groups
    if name == "GroupsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**GROUPS_SCHEMA)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    if name in (
        "GroupsPostRequest",
        "GroupsPostResponse",
        "GroupGetResponse",
        "GroupPatchRequest",
        "GroupPatchResponse",
    ):
        # All single-group operations share one "Group" container schema.
        openapi_spec.components.schemas.setdefault(
            "Group", TypeSchema(**GROUP_CONTAINER_SCHEMA)
        )
        return ("#/components/schemas/Group", json_mime, True)
    if name == "GroupsUsersGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**user.USERS_SCHEMA)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    if name in (
        "GroupsUserGetResponse",
        "GroupsUserPutRequest",
        "GroupsUserPutResponse",
    ):
        # Handled, but the operation has no describable body.
        return (None, None, True)
    return (None, None, False)
+# +from typing import Any + +from keystone.resource import schema as ks_schema + +from codegenerator.common.schema import TypeSchema +from codegenerator.common.schema import ParameterSchema +from codegenerator.openapi.keystone_schemas import common + + +PROJECT_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid", "readOnly": True}, + **ks_schema._project_properties, + }, + "additionalProperties": True, +} + +PROJECT_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "project": { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid", "readOnly": True}, + **ks_schema._project_properties, + }, + "additionalProperties": True, + }, + }, +} + +PROJECTS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"projects": {"type": "array", "items": PROJECT_SCHEMA}}, +} + +PROJECT_LIST_PARAMETERS = { + "project_domain_id": { + "in": "query", + "name": "domain_id", + "description": "Filters the response by a domain ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "project_enabled": { + "in": "query", + "name": "enabled", + "description": "If set to true, then only enabled projects will be returned. Any value other than 0 (including no value) will be interpreted as true.", + "schema": {"type": "boolean"}, + }, + "project_is_domain": { + "in": "query", + "name": "is_domain", + "description": "If this is specified as true, then only projects acting as a domain are included. 
def _post_process_operation_hook(
    openapi_spec, operation_spec, path: str | None = None
):
    """Attach project list query parameters to the ``projects:get`` operation."""
    if operation_spec.operationId != "projects:get":
        return
    # Register each parameter globally once, then reference it from the
    # operation (skipping refs the operation already carries).
    known_refs = {param.ref for param in operation_spec.parameters}
    for key, definition in PROJECT_LIST_PARAMETERS.items():
        openapi_spec.components.parameters.setdefault(
            key, ParameterSchema(**definition)
        )
        param_ref = f"#/components/parameters/{key}"
        if param_ref not in known_refs:
            operation_spec.parameters.append(ParameterSchema(ref=param_ref))
            known_refs.add(param_ref)


def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Resolve a project operation schema name to a ``(ref, mime, matched)`` triple.

    Registers the schema in ``openapi_spec.components.schemas`` on first use.
    ``matched`` is ``False`` for names this module does not handle.
    """
    json_mime = "application/json"
    # Projects: all single-project operations share one "Project" container.
    if name in (
        "ProjectsPostRequest",
        "ProjectsPostResponse",
        "ProjectPatchRequest",
        "ProjectPatchResponse",
        "ProjectGetResponse",
    ):
        openapi_spec.components.schemas.setdefault(
            "Project", TypeSchema(**PROJECT_CONTAINER_SCHEMA)
        )
        return ("#/components/schemas/Project", json_mime, True)
    if name == "ProjectsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**PROJECTS_SCHEMA)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    # Project Tags
    if name == "ProjectsTagPutRequest":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**ks_schema.project_tag_create)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    if name == "ProjectsTagsPutRequest":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**ks_schema.project_tags_update)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    if name in ("ProjectsTagsGetResponse", "ProjectsTagsPutResponse"):
        # Both operations return the common tags listing document.
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**common.TAGS_SCHEMA)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    return (None, None, False)
#
from typing import Any

from codegenerator.common.schema import TypeSchema
from codegenerator.common.schema import ParameterSchema

# A single region resource as exposed by the Identity API.
REGION_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "description": {
            "type": "string",
            "description": "The region description.",
        },
        "id": {
            "type": "string",
            "format": "uuid",
            "description": "The ID for the region.",
            "readOnly": True,
        },
        "parent_id": {
            "type": "string",
            "format": "uuid",
            "description": "To make this region a child of another region, set this parameter to the ID of the parent region.",
        },
    },
}

# Single-region response body wrapper.
REGION_CONTAINER_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"region": REGION_SCHEMA},
}

# Region list response body.
REGIONS_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"regions": {"type": "array", "items": REGION_SCHEMA}},
}

# Query parameters accepted by the region list operation.
REGIONS_LIST_PARAMETERS = {
    "region_parent_region_id": {
        "in": "query",
        "name": "parent_region_id",
        "description": "Filters the response by a parent region, by ID.",
        "schema": {"type": "string", "format": "uuid"},
    },
}


def _post_process_operation_hook(
    openapi_spec, operation_spec, path: str | None = None
):
    """Attach region list query parameters to the ``regions:get`` operation."""
    if operation_spec.operationId != "regions:get":
        return
    # Register each parameter globally once, then reference it from the
    # operation (skipping refs the operation already carries).
    known_refs = {param.ref for param in operation_spec.parameters}
    for key, definition in REGIONS_LIST_PARAMETERS.items():
        openapi_spec.components.parameters.setdefault(
            key, ParameterSchema(**definition)
        )
        param_ref = f"#/components/parameters/{key}"
        if param_ref not in known_refs:
            operation_spec.parameters.append(ParameterSchema(ref=param_ref))
            known_refs.add(param_ref)


def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Resolve a region operation schema name to a ``(ref, mime, matched)`` triple.

    Registers the schema in ``openapi_spec.components.schemas`` on first use.
    ``matched`` is ``False`` for names this module does not handle.
    """
    json_mime = "application/json"
    # ### Regions
    if name == "RegionsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**REGIONS_SCHEMA)
        )
        return (f"#/components/schemas/{name}", json_mime, True)
    if name in (
        "RegionGetResponse",
        "RegionsPostRequest",
        "RegionsPostResponse",
        "RegionPatchRequest",
        "RegionPatchResponse",
    ):
        # All single-region operations share one "Region" container schema.
        openapi_spec.components.schemas.setdefault(
            "Region", TypeSchema(**REGION_CONTAINER_SCHEMA)
        )
        return ("#/components/schemas/Region", json_mime, True)
    return (None, None, False)
+# +from typing import Any + +from keystone.assignment import schema as assignment_schema + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.keystone_schemas import auth + +ROLE_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A role object.", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The role ID.", + "readOnly": True, + }, + "links": { + "type": "object", + "additionalProperties": { + "type": ["string", "null"], + "format": "uri", + }, + "readOnly": True, + }, + **assignment_schema._role_properties, + }, +} + +ROLE_INFO_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The role ID.", + }, + "name": { + "type": "string", + "description": "The role name.", + }, + "description": { + "type": "string", + "description": "The role description.", + }, + "links": { + "type": "object", + "properties": { + "self": { + "type": "string", + "format": "uri", + "description": "The link to the resource in question.", + } + }, + }, + }, +} + +ROLE_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"role": ROLE_SCHEMA}, +} + +ROLES_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "roles": {"type": "array", "items": ROLE_SCHEMA}, + "links": { + "type": "object", + "additionalProperties": { + "type": ["string", "null"], + "format": "uri", + }, + }, + }, +} + +# List of role info +ROLES_INFO_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "List of roles assigned to the resource", + "properties": { + "roles": {"type": "array", "items": ROLE_INFO_SCHEMA}, + "links": { + "type": "object", + "additionalProperties": { + "type": ["string", "null"], + "format": "uri", + }, + }, + }, +} + +# Role list specific query parameters +ROLE_LIST_PARAMETERS: dict[str, Any] = { + "role_domain_id": { + "in": "query", + 
"name": "domain_id", + "description": "Filters the response by a domain ID.", + "schema": {"type": "string", "format": "uuid"}, + }, +} + + +ROLE_INFERENCE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "role_inference": { + "type": "object", + "properties": { + "prior_role": ROLE_INFO_SCHEMA, + "implies": ROLE_INFO_SCHEMA, + }, + } + }, +} + +ROLES_INFERENCE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "role_inference": { + "type": "object", + "properties": { + "prior_role": ROLE_INFO_SCHEMA, + "implies": { + "type": "array", + "items": ROLE_INFO_SCHEMA, + }, + }, + } + }, +} + +ROLES_INFERENCES_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "role_inferences": { + "type": "array", + "items": { + "type": "object", + "properties": { + "prior_role": ROLE_INFO_SCHEMA, + "implies": { + "type": "array", + "items": ROLE_INFO_SCHEMA, + }, + }, + }, + } + }, +} + +USER_INFO_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A user object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A user UUID", + }, + "name": {"type": "string", "description": "A user name"}, + "domain": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "A user domain UUID", + }, + "name": { + "type": "string", + "description": "A user domain name", + }, + }, + }, + }, +} + +GROUP_INFO_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid", "description": "A user ID"}, + "name": {"type": "string", "description": "A user name"}, + }, +} + +ROLE_ASSIGNMENT_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "role": ROLE_INFO_SCHEMA, + "scope": auth.SCOPE_SCHEMA, + "user": USER_INFO_SCHEMA, + "group": GROUP_INFO_SCHEMA, + "links": { + "type": "object", + "properties": { + "assignment": { + "type": "string", + "format": "uri", + "description": "a link to the assignment 
that gave rise to this entity", + }, + "membership": { + "type": "string", + "format": "uri", + }, + }, + }, + }, +} + +ROLE_ASSIGNMENTS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "role_assignments": {"type": "array", "items": ROLE_ASSIGNMENT_SCHEMA} + }, +} + +#: Role assignment query parameters common for LIST and HEAD +ROLE_ASSIGNMENTS_QUERY_PARAMETERS: dict[str, Any] = { + "role_assignment_group_id": { + "in": "query", + "name": "group.id", + "description": "Filters the response by a group ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "role_assignment_role_id": { + "in": "query", + "name": "role.id", + "description": "Filters the response by a role ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "role_assignment_user_id": { + "in": "query", + "name": "user.id", + "description": "Filters the response by a user ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "role_assignment_scope_domain_id": { + "in": "query", + "name": "scope.domain.id", + "description": "Filters the response by a domain ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "role_assignment_scope_project_id": { + "in": "query", + "name": "scope.project.id", + "description": "Filters the response by a project ID.", + "schema": {"type": "string", "format": "uuid"}, + }, + "role_assignment_inherit": { + "in": "query", + "name": "scope.OS-INHERIT:inherited_to", + "description": "Filters based on role assignments that are inherited. 
# Role assignments list specific query parameters
ROLE_ASSIGNMENT_LIST_PARAMETERS: dict[str, Any] = {
    "role_assignment_effective": {
        "in": "query",
        "name": "effective",
        "description": "Returns the effective assignments, including any assignments gained by virtue of group membership.",
        "schema": {"type": "null"},
        "allowEmptyValue": True,
        "x-openstack": {"is-flag": True},
    },
    "role_assignment_include_names": {
        "in": "query",
        "name": "include_names",
        "description": "If set, then the names of any entities returned will be include as well as their IDs. Any value other than 0 (including no value) will be interpreted as true.",
        "schema": {"type": "null"},
        "allowEmptyValue": True,
        "x-openstack": {"min-ver": "3.6", "is-flag": True},
    },
    "role_assignment_include_subtree": {
        "in": "query",
        "name": "include_subtree",
        "description": "If set, then relevant assignments in the project hierarchy below the project specified in the scope.project_id query parameter are also included in the response. Any value other than 0 (including no value) for include_subtree will be interpreted as true.",
        "schema": {"type": "null"},
        "allowEmptyValue": True,
        # fix: "is-flag" was the string "True" here while every other
        # parameter in this module uses the boolean True
        "x-openstack": {"min-ver": "3.6", "is-flag": True},
    },
}


def _post_process_operation_hook(
    openapi_spec, operation_spec, path: str | None = None
):
    """Attach role/role-assignment query parameters to list operations.

    Selects the parameter group(s) matching the operation and registers
    each parameter in ``openapi_spec.components.parameters`` (once),
    referencing it from the operation unless already present.
    """
    operationId = operation_spec.operationId

    # Select the parameter groups for this operation; note that
    # role_assignments:get additionally accepts the list-only flags.
    if operationId == "roles:get":
        parameter_groups = [ROLE_LIST_PARAMETERS]
    elif operationId == "role_assignments:get":
        parameter_groups = [
            ROLE_ASSIGNMENTS_QUERY_PARAMETERS,
            ROLE_ASSIGNMENT_LIST_PARAMETERS,
        ]
    elif operationId == "role_assignments:head":
        parameter_groups = [ROLE_ASSIGNMENTS_QUERY_PARAMETERS]
    else:
        return

    # fix: the original used three copies of this loop, one of them
    # iterating a variable named "map" (shadowing the builtin)
    for params in parameter_groups:
        for key, val in params.items():
            openapi_spec.components.parameters.setdefault(
                key, ParameterSchema(**val)
            )
            ref = f"#/components/parameters/{key}"
            if ref not in [x.ref for x in operation_spec.parameters]:
                operation_spec.parameters.append(ParameterSchema(ref=ref))


def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Resolve a role operation schema name to a ``(ref, mime, matched)`` triple.

    Registers the schema in ``openapi_spec.components.schemas`` on first use.
    ``matched`` is ``False`` for names this module does not handle.
    """
    mime_type: str = "application/json"
    ref: str
    # Roles
    if name == "RolesGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**ROLES_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "RolesPostRequest",
        "RolesPostResponse",
        "RoleGetResponse",
        "RolePatchRequest",
        "RolePatchResponse",
    ]:
        # All single-role operations share one "Role" container schema.
        openapi_spec.components.schemas.setdefault(
            "Role", TypeSchema(**ROLE_CONTAINER_SCHEMA)
        )
        ref = "#/components/schemas/Role"

    # Role Implies
    elif name == "RolesImpliesGetResponse":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**ROLES_INFERENCE_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        # merged: the GET and PUT branches registered the same schema
        "RolesImplyGetResponse",
        "RolesImplyPutResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**ROLE_INFERENCE_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"

    elif name == "Role_AssignmentsGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**ROLE_ASSIGNMENTS_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"
    # Role Inferences
    elif name == "Role_InferencesGetResponse":
        openapi_spec.components.schemas.setdefault(
            name, TypeSchema(**ROLES_INFERENCES_SCHEMA)
        )
        ref = f"#/components/schemas/{name}"

    # Project/Domain Roles
    elif name in [
        "ProjectsUsersRolesGetResponse",
        "DomainsGroupsRolesGetResponse",
        "ProjectsGroupsRolesGetResponse",
        "DomainsUsersRolesGetResponse",
    ]:
        openapi_spec.components.schemas.setdefault(
            "RolesInfos", TypeSchema(**ROLES_INFO_SCHEMA)
        )
        ref = "#/components/schemas/RolesInfos"
    elif name in [
        "DomainsUsersRoleGetResponse",
        "DomainsUsersRolePutRequest",
        "DomainsUsersRolePutResponse",
        "DomainsGroupsRoleGetResponse",
        "DomainsGroupsRolePutRequest",
        "DomainsGroupsRolePutResponse",
        "ProjectsUsersRoleGetResponse",
        "ProjectsUsersRolePutRequest",
        "ProjectsUsersRolePutResponse",
        "ProjectsGroupsRoleGetResponse",
        "ProjectsGroupsRolePutRequest",
        "ProjectsGroupsRolePutResponse",
    ]:
        # Grant check/assign operations have no describable body.
        return (None, None, True)

    else:
        return (None, None, False)

    return (ref, mime_type, True)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from typing import Any

from codegenerator.common.schema import TypeSchema
from codegenerator.common.schema import ParameterSchema


# JSON schema of a single keystone "service" resource.
SERVICE_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "description": {
            "type": "string",
            "description": "The service description.",
        },
        "enabled": {
            "type": "boolean",
            "description": "Defines whether the service and its endpoints appear in the service catalog.",
        },
        "id": {
            "type": "string",
            "format": "uuid",
            # Fixed: previous text was copy-pasted from the endpoint schema
            # ("service to which the endpoint belongs").
            "description": "The UUID of the service.",
            "readOnly": True,
        },
        "name": {
            "type": "string",
            "description": "The service name.",
        },
        "type": {
            "type": "string",
            # Fixed: previous description was truncated mid-sentence.
            "description": "The service type, which describes the API implemented by the service.",
        },
    },
}

# Single-service response wrapper: {"service": {...}}.
SERVICE_CONTAINER_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"service": SERVICE_SCHEMA},
}

# Service list response wrapper: {"services": [{...}, ...]}.
SERVICES_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"services": {"type": "array", "items": SERVICE_SCHEMA}},
}

# Query parameters accepted by the service list (GET /services) operation.
SERVICES_LIST_PARAMETERS = {
    "service_type": {
        "in": "query",
        # NOTE(review): keystone's api-ref documents the filter as "?type=";
        # confirm whether the query name should be "type" instead of "service".
        "name": "service",
        # Fixed: previous description was copy-pasted from a domain filter.
        "description": "Filters the response by a service type.",
        "schema": {"type": "string"},
    },
}


def _post_process_operation_hook(
    openapi_spec, operation_spec, path: str | None = None
):
    """Hook to allow service specific generator to modify details.

    For the service list operation it registers the list query parameters
    in the spec components and attaches ``$ref`` entries to the operation,
    skipping refs that are already present.
    """
    operationId = operation_spec.operationId
    if operationId == "services:get":
        for key, val in SERVICES_LIST_PARAMETERS.items():
            openapi_spec.components.parameters.setdefault(
                key, ParameterSchema(**val)
            )
            ref = f"#/components/parameters/{key}"
            if ref not in [x.ref for x in operation_spec.parameters]:
                operation_spec.parameters.append(ParameterSchema(ref=ref))


def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Return (ref, mime_type, matched) for a service-related schema name.

    Registers the schema in ``openapi_spec.components.schemas`` on first
    use. Returns ``(None, None, False)`` when the name is not handled by
    this module so the caller can fall through to other schema providers.
    """
    mime_type: str = "application/json"
    ref: str
    # ### Services
    if name == "ServicesGetResponse":
        openapi_spec.components.schemas.setdefault(
            name,
            TypeSchema(**SERVICES_SCHEMA),
        )
        ref = f"#/components/schemas/{name}"
    elif name in [
        "ServicesPostRequest",
        "ServicesPostResponse",
        "ServiceGetResponse",
        "ServicePatchRequest",
        "ServicePatchResponse",
    ]:
        # All single-service operations share one container schema.
        openapi_spec.components.schemas.setdefault(
            "Service",
            TypeSchema(**SERVICE_CONTAINER_SCHEMA),
        )
        ref = "#/components/schemas/Service"

    else:
        return (None, None, False)

    return (ref, mime_type, True)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import copy
from typing import Any

from keystone.identity import schema as identity_schema

from codegenerator.common.schema import ParameterSchema
from codegenerator.common.schema import TypeSchema


# Query parameters accepted by the user list (GET /users) operation.
USER_LIST_PARAMETERS: dict[str, Any] = {
    "domain_id": {
        "in": "query",
        "name": "domain_id",
        "description": "Filters the response by a domain ID.",
        "schema": {"type": "string", "format": "uuid"},
    },
    "enabled": {
        "in": "query",
        "name": "enabled",
        # Fixed: previous text said "projects" in a user-list context.
        "description": "If set to true, then only enabled users will be returned. Any value other than 0 (including no value) will be interpreted as true.",
        "schema": {"type": "boolean"},
    },
    "idp_id": {
        "in": "query",
        "name": "idp_id",
        # Fixed: previous description was copy-pasted from the domain filter.
        "description": "Filters the response by an identity provider ID.",
        "schema": {"type": "string", "format": "uuid"},
    },
    "name": {
        "in": "query",
        "name": "name",
        "description": "Filters the response by a resource name.",
        "schema": {"type": "string"},
    },
    "password_expires_at": {
        "in": "query",
        "name": "password_expires_at",
        "description": "Filter results based on which user passwords have expired. The query should include an operator and a timestamp with a colon (:) separating the two, for example: `password_expires_at={operator}:{timestamp}`.\nValid operators are: `lt`, `lte`, `gt`, `gte`, `eq`, and `neq`.\nValid timestamps are of the form: YYYY-MM-DDTHH:mm:ssZ.",
        "schema": {"type": "string", "format": "date-time"},
    },
    "protocol_id": {
        "in": "query",
        "name": "protocol_id",
        "description": "Filters the response by a protocol ID.",
        "schema": {"type": "string", "format": "uuid"},
    },
    "unique_id": {
        "in": "query",
        "name": "unique_id",
        "description": "Filters the response by a unique ID.",
        "schema": {"type": "string", "format": "uuid"},
    },
}


# Single user resource: keystone's own property schema plus the read-only id.
USER_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "id": {"type": "string", "format": "uuid", "readOnly": True},
        **identity_schema._user_properties,
    },
}

# NOTE(review): these two wrappers alias keystone's schema dicts without
# copying; that is safe only as long as nothing mutates them (unlike the
# password-change schema below, which is tweaked and therefore deep-copied).
USER_CREATE_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"user": identity_schema.user_create},
}

USER_PATCH_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"user": identity_schema.user_update},
}


USER_CONTAINER_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"user": USER_SCHEMA},
}

USERS_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"users": {"type": "array", "items": USER_SCHEMA}},
}

# Deep-copy keystone's password_change schema before tweaking it. The
# previous code assigned the shared `identity_schema.password_change`
# dict and then mutated its nested properties in place, which silently
# altered keystone's module-level schema for every other consumer.
USER_PWD_CHANGE_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {"user": copy.deepcopy(identity_schema.password_change)},
}

# Set `password` format for password change operation (on our copy only).
_pwd_props = USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]
_pwd_props["password"]["format"] = "password"
_pwd_props["original_password"]["format"] = "password"

# Group as seen from the user's group listing (includes membership expiry).
USER_GROUP_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "description": {
            "type": "string",
            "description": "The description of the group.",
        },
        "domain_id": {
            "type": "string",
            "format": "uuid",
            "description": "The ID of the domain of the group.",
        },
        "id": {
            "type": "string",
            "format": "uuid",
            "description": "The ID of the group.",
        },
        "name": {
            "type": "string",
            "description": "The name of the group.",
        },
        "membership_expires_at": {
            "type": "string",
            "format": "date-time",
            "description": "The date and time when the group membership expires. A null value indicates that the membership never expires.",
            "x-openstack": {"min-ver": "3.14"},
        },
    },
}

USER_GROUPS_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "groups": {
            "type": "array",
            "description": "A list of group objects",
            "items": USER_GROUP_SCHEMA,
        }
    },
}

# Project as seen from the user's project listing.
USER_PROJECT_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "description": {
            "type": "string",
            "description": "The description of the project.",
        },
        "domain_id": {
            "type": "string",
            "format": "uuid",
            "description": "The ID of the domain of the project.",
        },
        "id": {
            "type": "string",
            "format": "uuid",
            "description": "The ID of the project.",
        },
        "parent_id": {
            "type": "string",
            "format": "uuid",
            "description": "The parent id of the project.",
        },
        "name": {
            "type": "string",
            "description": "The name of the project.",
        },
    },
}

USER_PROJECTS_SCHEMA: dict[str, Any] = {
    "type": "object",
    "properties": {
        "projects": {
            "type": "array",
            "description": "A list of project objects",
            "items": USER_PROJECT_SCHEMA,
        }
    },
}


def _post_process_operation_hook(
    openapi_spec, operation_spec, path: str | None = None
):
    """Hook to allow service specific generator to modify details.

    For the user list operation it registers the list query parameters in
    the spec components and attaches ``$ref`` entries to the operation,
    skipping refs that are already present.
    """
    operationId = operation_spec.operationId

    if operationId == "users:get":
        for key, val in USER_LIST_PARAMETERS.items():
            openapi_spec.components.parameters.setdefault(
                key, ParameterSchema(**val)
            )
            ref = f"#/components/parameters/{key}"
            if ref not in [x.ref for x in operation_spec.parameters]:
                operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
    openapi_spec,
    name,
    description=None,
    schema_def=None,
    action_name=None,
) -> tuple[str | None, str | None, bool]:
    """Return (ref, mime_type, matched) for a user-related schema name.

    Registers the matching schema in ``openapi_spec.components.schemas``
    on first use. Returns ``(None, None, False)`` when the name is not
    handled by this module so the caller can fall through to other
    schema providers.
    """
    mime_type: str = "application/json"
    # Dispatch table: every user-related operation keys its component
    # schema on the operation-derived name itself.
    known_schemas: dict[str, dict[str, Any]] = {
        "UserPatchRequest": USER_PATCH_SCHEMA,
        "UsersPostRequest": USER_CREATE_SCHEMA,
        "UsersGetResponse": USERS_SCHEMA,
        "UserGetResponse": USER_CONTAINER_SCHEMA,
        "UserPostResponse": USER_CONTAINER_SCHEMA,
        "UserPatchResponse": USER_CONTAINER_SCHEMA,
        "UsersPasswordPostRequest": USER_PWD_CHANGE_SCHEMA,
        "UsersGroupsGetResponse": USER_GROUPS_SCHEMA,
        "UsersProjectsGetResponse": USER_PROJECTS_SCHEMA,
    }
    schema = known_schemas.get(name)
    if schema is None:
        # Not a user schema — let other providers have a go.
        return (None, None, False)
    openapi_spec.components.schemas.setdefault(name, TypeSchema(**schema))
    return (f"#/components/schemas/{name}", mime_type, True)
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import copy +import logging +from multiprocessing import Process, Manager +from pathlib import Path +import re +import tempfile +from typing import Any + +from routes.base import Route +from ruamel.yaml.scalarstring import LiteralScalarString + +import sqlalchemy + +from codegenerator.common.schema import ParameterSchema +from codegenerator.common.schema import PathSchema +from codegenerator.common.schema import SpecSchema +from codegenerator.common.schema import TypeSchema +from codegenerator.openapi.base import OpenStackServerSourceBase +from codegenerator.openapi.base import VERSION_RE +from codegenerator.openapi import neutron_schemas +from codegenerator.openapi.utils import merge_api_ref_doc + + +PASTE_CONFIG = """ +[composite:neutron] +use = egg:Paste#urlmap +# /: neutronversions_composite +/v2.0: neutronapi_v2_0 + +[composite:neutronapi_v2_0] +use = call:neutron.auth:pipeline_factory +keystone = extensions neutronapiapp_v2_0 + +[composite:neutronversions_composite] +use = call:neutron.auth:pipeline_factory +keystone = neutronversions + +[filter:extensions] +paste.filter_factory = neutron.api.extensions:plugin_aware_extension_middleware_factory + +[app:neutronversions] +paste.app_factory = neutron.pecan_wsgi.app:versions_factory + +[app:neutronapiapp_v2_0] +paste.app_factory = neutron.api.v2.router:APIRouter.factory + """ + + +class NeutronGenerator(OpenStackServerSourceBase): + URL_TAG_MAP = { + "/agents/{agent_id}/dhcp-networks": "dhcp-agent-scheduler", + "/agents": "networking-agents", + "/ports/{port_id}/bindings": "port-bindings", + 
"/routers/{router_id}/conntrack_helpers/": "routers-conntrack-helper", + "/floatingips/{floatingip_id}/port_forwardings/": "floatingips-port-forwardings", + } + + def __init__(self): + self.api_version = "2.0" + self.min_api_version = "2.0" + + # self.tempdir = tempfile.gettempdir() + + def _build_neutron_db(self, tempdir): + db_path: str = f"sqlite:///{tempdir}/neutron.db" # noqa + engine = sqlalchemy.create_engine(db_path) + from neutron.db.migration.models import head + + db_meta = head.get_metadata() + db_meta.create_all(engine) + return (db_path, engine) + + def process_base_neutron_routes(self, work_dir, processed_routes, args): + """Setup base Neutron with whatever is in the core""" + logging.info("Processing base Neutron") + # Create the default configurations + from neutron.common import config as neutron_config + from neutron.conf.plugins.ml2 import config as ml2_config + + from neutron.db import models # noqa + from neutron_lib import fixture + from oslo_config import cfg + from oslo_db import options as db_options + + tempdir = tempfile.gettempdir() + + fixture.RPCFixture().setUp() + + neutron_config.register_common_config_options() + ml2_config.register_ml2_plugin_opts() + + plugin = "neutron.plugins.ml2.plugin.Ml2Plugin" + cfg.CONF.set_override("core_plugin", plugin) + + cfg.CONF.set_override( + "api_paste_config", Path(tempdir, "api-paste.ini.generator") + ) + with open(Path(tempdir, "api-paste.ini.generator"), "w") as fp: + fp.write(PASTE_CONFIG) + + neutron_config.init([]) + cfg.CONF.set_override( + "service_plugins", + [ + "router", + "metering", + "qos", + "tag", + "flavors", + "auto_allocate", + "segments", + "network_ip_availability", + "network_segment_range", + "revisions", + "timestamp", + "loki", + "log", + "port_forwarding", + "placement", + "conntrack_helper", + # "ovn-router", + # "trunk", + "local_ip", + "ndp_proxy", + ], + ) + cfg.CONF.set_override( + "extension_drivers", + [ + "dns", + "port_security", + "qos", + "data_plane_status", 
+ "dns_domain_ports", + "dns_domain_keywords", + "port_device_profile", + "port_numa_affinity_policy", + "uplink_status_propagation", + "subnet_dns_publis_fixed_ip", + "tag_ports_during_bulk_creation", + "uplink_status_propagation", + "port_hints", + "port_device_profile", + "port_hint_ovs_tx_steering", + ], + group="ml2", + ) + + # Create the DB + db_path, engine = self._build_neutron_db(tempdir) + db_options.set_defaults(cfg.CONF, connection=db_path) + + app_ = neutron_config.load_paste_app("neutron") + router = None + for i, w in app_.applications: + if hasattr(w, "_router"): + # We are only interested in the extensions app with a router + router = w._router + + # Raise an error to signal that we have not found a router + if not router: + raise NotImplementedError + + (impl_path, openapi_spec) = self._read_spec(work_dir) + self._process_router(router, openapi_spec, processed_routes) + + # Add base resource routes exposed as a pecan app + self._process_base_resource_routes(openapi_spec, processed_routes) + + self.dump_openapi(openapi_spec, impl_path, args.validate) + + def process_neutron_with_vpnaas(self, work_dir, processed_routes, args): + """Setup base Neutron with enabled vpnaas""" + logging.info("Processing Neutron with VPNaaS") + from neutron.common import config as neutron_config + from neutron.conf.plugins.ml2 import config as ml2_config + + from neutron.db import models # noqa + from neutron_lib import fixture + from neutron import manager # noqa + from oslo_config import cfg + from oslo_db import options as db_options + + fixture.RPCFixture().setUp() + tempdir = tempfile.gettempdir() + + neutron_config.register_common_config_options() + ml2_config.register_ml2_plugin_opts() + + plugin = "neutron.plugins.ml2.plugin.Ml2Plugin" + cfg.CONF.set_override("core_plugin", plugin) + + cfg.CONF.set_override( + "api_paste_config", Path(tempdir, "api-paste.ini.generator") + ) + with open(Path(tempdir, "api-paste.ini.generator"), "w") as fp: + fp.write(PASTE_CONFIG) 
+ + neutron_config.init([]) + cfg.CONF.set_override( + "service_plugins", + [ + "router", + "vpnaas", + ], + ) + cfg.CONF.set_override( + "service_provider", + [ + "VPN:dummy:neutron_vpnaas.tests.unit.dummy_ipsec.DummyIPsecVPNDriver:default", + ], + group="service_providers", + ) + # Create the DB + db_path, engine = self._build_neutron_db(tempdir) + db_options.set_defaults(cfg.CONF, connection=db_path) + + # Create VPNaaS DB tables + from neutron_vpnaas.db.models import head + + db_meta = head.get_metadata() + db_meta.create_all(engine) + + app_ = neutron_config.load_paste_app("neutron") + for i, w in app_.applications: + if hasattr(w, "_router"): + # We are only interested in the extensions app with a router + router = w._router + + # Raise an error to signal that we have not found a router + if not router: + raise NotImplementedError + + (impl_path, openapi_spec) = self._read_spec(work_dir) + self._process_router(router, openapi_spec, processed_routes) + self.dump_openapi(openapi_spec, impl_path, args.validate) + + def _read_spec(self, work_dir): + """Read the spec from file or create an empty one""" + impl_path = Path(work_dir, "openapi_specs", "network", "v2.yaml") + impl_path.parent.mkdir(parents=True, exist_ok=True) + openapi_spec = self.load_openapi(Path(impl_path)) + if not openapi_spec: + openapi_spec = SpecSchema( + info=dict( + title="OpenStack Network API", + description=LiteralScalarString( + "Network API provided by Neutron service" + ), + version=self.api_version, + ), + openapi="3.1.0", + security=[{"ApiKeyAuth": []}], + tags=[], + paths={}, + components=dict( + securitySchemes={ + "ApiKeyAuth": { + "type": "apiKey", + "in": "header", + "name": "X-Auth-Token", + } + }, + headers={}, + parameters={}, + schemas={}, + ), + ) + return (impl_path, openapi_spec) + + def generate(self, target_dir, args): + work_dir = Path(target_dir) + work_dir.mkdir(parents=True, exist_ok=True) + + # NOTE(gtema): call me paranoic or stupid, but I just gave up fighting + 
# agains oslo_config and oslo_policy with their global state. It is + # just too painful and takes too much precious time. On multiple + # invocation with different config there are plenty of things remaining + # in the old state. In order to workaroung this just process in + # different processes. + with Manager() as manager: + # Since we may process same route multiple times we need to have a + # shared state + processed_routes = manager.dict() + # Base Neutron + p = Process( + target=self.process_base_neutron_routes, + args=[work_dir, processed_routes, args], + ) + p.start() + p.join() + if p.exitcode != 0: + raise RuntimeError("Error generating Neutron OpenAPI schma") + + # VPNaaS + p = Process( + target=self.process_neutron_with_vpnaas, + args=[work_dir, processed_routes, args], + ) + p.start() + p.join() + if p.exitcode != 0: + raise RuntimeError("Error generating Neutron OpenAPI schma") + + (impl_path, openapi_spec) = self._read_spec(work_dir) + + # post processing cleanup of the spec + self._sanitize_param_ver_info(openapi_spec, self.min_api_version) + + # merge descriptions from api-ref doc + if args.api_ref_src: + merge_api_ref_doc( + openapi_spec, args.api_ref_src, allow_strip_version=False + ) + + self.dump_openapi(openapi_spec, Path(impl_path), args.validate) + + return impl_path + + def _process_router(self, router, openapi_spec, processed_routes): + """Scan through the routes exposed on a router""" + for route in router.mapper.matchlist: + if route.routepath.endswith(".:(format)"): + continue + # if route.routepath != "/networks": + # continue + # if "networks" not in route.routepath: + # continue + if route.routepath.endswith("/edit") or route.routepath.endswith( + "/new" + ): + # NEUTRON folks - please fix + logging.warning( + "Skipping processing %s route", route.routepath + ) + continue + if ( + "/qos/ports" in route.routepath + or "/qos/networks" in route.routepath + ): + # NEUTRON folks - please fix + logging.warning( + "Skipping processing %s 
route", route.routepath + ) + continue + if ( + route.routepath.endswith("/tags") + and route.conditions["method"][0] == "POST" + ): + logging.warning( + "Skipping processing POST %s route", route.routepath + ) + continue + if route.routepath.startswith("/extensions") and route.conditions[ + "method" + ][0] in ["POST", "DELETE", "PUT"]: + continue + if route.routepath.startswith( + "/availability_zones" + ) and route.conditions["method"][0] in ["POST", "DELETE", "PUT"]: + continue + if route.routepath.startswith( + "/availability_zones/" + ) and route.conditions["method"][0] in ["GET"]: + # There is no "show" for AZ + continue + if route.routepath in ["/quotas/tenant", "/quotas/project"]: + # Tenant and Project quota are not a thing + continue + if route.routepath == "/quotas" and route.conditions["method"][ + 0 + ] in ["POST"]: + # Tenant and Project quota is the same + continue + + self._process_route(route, openapi_spec, processed_routes) + + def _process_base_resource_routes(self, openapi_spec, processed_routes): + """Process base resources exposed through Pecan""" + from neutron import manager + + mgr = manager.NeutronManager.get_instance() + # Nets/subnets/ports are base resources (non extension). 
They are thus + # missing in the extension middleware + for coll, res in [ + ("networks", "network"), + ("subnets", "subnet"), + ("ports", "port"), + ]: + for method, action in [("GET", "index"), ("POST", "create")]: + self._process_route( + Route( + coll, + f"/{coll}", + conditions={"method": [method]}, + action=action, + _collection_name=coll, + _member_name=res, + ), + openapi_spec, + processed_routes, + controller=mgr.get_controller_for_resource(coll), + ) + for coll, res in [ + ("networks", "network"), + ("subnets", "subnet"), + ("ports", "port"), + ]: + for method, action in [ + ("GET", "show"), + ("DELETE", "delete"), + ("PUT", "update"), + ]: + self._process_route( + Route( + coll, + f"/{coll}/{{{res}_id}}", + conditions={"method": [method]}, + action=action, + _collection_name=coll, + _member_name=res, + ), + openapi_spec, + processed_routes, + controller=mgr.get_controller_for_resource(coll), + ) + self._process_route( + Route( + "port_allowed_address_pair", + "/ports/{port_id}/add_allowed_address_pairs", + conditions={"method": ["PUT"]}, + action="add_allowed_address_pairs", + _collection_name=coll, + _member_name=res, + ), + openapi_spec, + processed_routes, + controller=mgr.get_controller_for_resource("ports"), + ) + + def _process_route( + self, + route, + openapi_spec, + processed_routes, + controller=None, + ver_prefix="/v2.0", + ): + path = ver_prefix + operation_spec = None + for part in route.routelist: + if isinstance(part, dict): + path += "{" + part["name"] + "}" + else: + path += part + + if "method" not in route.conditions: + raise RuntimeError("Method not set for %s" % route) + method = ( + route.conditions.get("method", "GET")[0] + if route.conditions + else "GET" + ) + + wsgi_controller = controller or route.defaults["controller"] + # collection_name = route.collection_name + # member_name = route.member_name + action = route.defaults["action"] + controller = None + func = None + if hasattr(wsgi_controller, "controller"): + controller = 
wsgi_controller.controller + if hasattr(wsgi_controller, "func"): + func = wsgi_controller.func + else: + controller = wsgi_controller + if hasattr(wsgi_controller, action): + func = getattr(wsgi_controller, action) + + processed_key = f"{path}:{method}:{action}" # noqa + # Some routes in Neutron are duplicated. We need to skip them since + # otherwise we may duplicate query parameters which are just a list + if processed_key not in processed_routes: + processed_routes[processed_key] = 1 + else: + logging.warning("Skipping duplicated route %s", processed_key) + return + + logging.info( + "Path: %s; method: %s; operation: %s", path, method, action + ) + + # Get Path elements + path_elements: list[str] = list(filter(None, path.split("/"))) + if path_elements and VERSION_RE.match(path_elements[0]): + path_elements.pop(0) + + operation_tags = self._get_tags_for_url(path) + + # Build path parameters (/foo/{foo_id}/bar/{id} => $foo_id, $foo_bar_id) + # Since for same path we are here multiple times check presence of + # parameter before adding new params + path_params: list[ParameterSchema] = [] + path_resource_names: list[str] = [] + for path_element in path_elements: + if "{" in path_element: + param_name = path_element.strip("{}") + global_param_name = ( + "_".join(path_resource_names) + f"_{param_name}" + ) + if global_param_name == "_project_id": + global_param_name = "project_id" + param_ref_name = f"#/components/parameters/{global_param_name}" + # Ensure reference to the param is in the path_params + if param_ref_name not in [ + k.ref for k in [p for p in path_params] + ]: + path_params.append(ParameterSchema(ref=param_ref_name)) + # Ensure global parameter is present + path_param = ParameterSchema( + location="path", name=param_name, required=True + ) + # openapi_spec.components["parameters"].setdefault(global_param_name, dict()) + if not path_param.description: + path_param.description = ( + f"{param_name} parameter for {path} API" + ) + # We can only assume the 
param type. For path it is logically a string only + path_param.type_schema = TypeSchema(type="string") + openapi_spec.components.parameters[global_param_name] = ( + path_param + ) + else: + path_resource_names.append(path_element.replace("-", "_")) + + if len(path_elements) == 0: + path_resource_names.append("root") + elif path_elements[-1].startswith("{"): + rn = path_resource_names[-1] + if rn.endswith("ies"): + rn = rn.replace("ies", "y") + else: + rn = rn.rstrip("s") + path_resource_names[-1] = rn + + # Set operationId + operation_id = re.sub( + r"^(/?v[0-9.]*/)", + "", + "/".join([x.strip("{}") for x in path_elements]) + + f":{method.lower()}", # noqa + ) + + path_spec = openapi_spec.paths.setdefault( + path, PathSchema(parameters=path_params) + ) + operation_spec = getattr(path_spec, method.lower()) + if not operation_spec.operationId: + operation_spec.operationId = operation_id + operation_spec.tags.extend(operation_tags) + operation_spec.tags = list(set(operation_spec.tags)) + for tag in operation_tags: + if tag not in [x["name"] for x in openapi_spec.tags]: + openapi_spec.tags.append({"name": tag}) + + self.process_operation( + func, + openapi_spec, + operation_spec, + path_resource_names, + controller=controller, + operation_name=action, + path=path, + method=method, + ) + + def process_operation( + self, + func, + openapi_spec, + operation_spec, + path_resource_names, + *, + controller=None, + operation_name=None, + method=None, + path=None, + ): + logging.info( + "Operation: %s", + operation_name, + ) + + attr_info = getattr(controller, "_attr_info", {}) + collection = getattr(controller, "_collection", None) + resource = getattr(controller, "_resource", None) + # Some backup locations for non extension like controller + if not attr_info: + attr_info = getattr(controller, "resource_info", {}) + if not collection: + collection = getattr(controller, "collection", None) + if not resource: + resource = getattr(controller, "resource", None) + + # 
body_schema_name = None + if method in ["POST", "PUT"]: + # Modification methods requires Body + schema_name = ( + "".join([x.title() for x in path_resource_names]) + + operation_name.title() + + "Request" + ) + + schema_ref = self._get_schema_ref( + openapi_spec, + schema_name, + description=f"Request of the {operation_spec.operationId} operation", + schema_def=attr_info, + method=method, + collection_key=collection, + resource_key=resource, + operation=operation_name, + ) + + if schema_ref: + content = operation_spec.requestBody.setdefault("content", {}) + mime_type = "application/json" + content[mime_type] = {"schema": {"$ref": schema_ref}} + + if operation_name == "index": + # Build query params + for field, data in attr_info.items(): + # operation_spec.setdefault("parameters", []) + if data.get("is_filter", False): + global_param_name = f"{collection}_{field}".replace( + ":", "_" + ) + param_ref_name = ( + f"#/components/parameters/{global_param_name}" + ) + # Ensure global parameter is present + query_param = ( + openapi_spec.components.parameters.setdefault( + global_param_name, + ParameterSchema( + location="query", + name=field, + type_schema=get_schema(data), + ), + ) + ) + if not query_param.description: + query_param.description = ( + f"{field} query parameter for {path} API" + ) + if field in [ + "tags", + "tags-any", + "not-tags", + "not-tags-any", + ]: + # Tags are special beasts + query_param.type_schema = TypeSchema( + type="array", items={"type": "string"} + ) + query_param.style = "form" + query_param.explode = False + if field == "fixed_ips": + # TODO: Neutron is expecting a + # trick to get an array of + # objects. For now we only + # implement array of strings + # (whatever they are). + query_param.type_schema = TypeSchema( + type="array", + items={"type": "string"}, + description="The IP addresses for the port. If the port has multiple IP addresses, this field has multiple entries. 
Each entry consists of IP address (ip_address) and the subnet ID from which the IP address is assigned (subnet_id).", + ) + query_param.style = "form" + query_param.explode = False + if param_ref_name not in [ + x.ref for x in operation_spec.parameters + ]: + operation_spec.parameters.append( + ParameterSchema(ref=param_ref_name) + ) + + responses_spec = operation_spec.responses + if method == "DELETE": + response_code = "204" + elif method == "POST": + response_code = "201" + else: + response_code = "200" + + if path.endswith("/tags/{id}"): + # /tags/{id} operation are non standard - they do not return body + if method == "PUT": + response_code = "201" + elif method == "GET": + response_code = "204" + + if response_code: + rsp = responses_spec.setdefault( + response_code, dict(description="Ok") + ) + if response_code != "204" and method != "DELETE": + # Arrange response placeholder + schema_name = ( + "".join([x.title() for x in path_resource_names]) + + operation_name.title() + + "Response" + ) + schema_ref = self._get_schema_ref( + openapi_spec, + schema_name, + description=f"Response of the {operation_spec.operationId} operation", + schema_def=attr_info, + method=method, + collection_key=collection, + resource_key=resource, + operation=operation_name, + ) + + if schema_ref: + rsp["content"] = { + "application/json": {"schema": {"$ref": schema_ref}} + } + + def _get_schema_ref( + self, + openapi_spec, + name, + description=None, + schema_def=None, + method=None, + collection_key=None, + resource_key=None, + operation=None, + ): + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + type="object", + description=LiteralScalarString(description), + ), + ) + + # Here come schemas that are not present in Neutron + if name == "ExtensionsIndexResponse": + schema.properties = { + "extensions": { + "type": "array", + "items": copy.deepcopy(neutron_schemas.EXTENSION_SCHEMA), + } + } + elif name == "ExtensionShowResponse": + schema.properties = { + 
"extension": copy.deepcopy(neutron_schemas.EXTENSION_SCHEMA) + } + elif name.endswith("TagsIndexResponse"): + schema.properties = { + "tags": { + "type": "array", + "items": {"type": "string", "maxLength": 255}, + } + } + elif name.endswith("TagsUpdate_AllResponse") or name.endswith( + "TagsUpdate_AllRequest" + ): + schema.properties = { + "tags": { + "type": "array", + "items": {"type": "string", "maxLength": 255}, + } + } + elif name == "QuotasIndexResponse": + schema.properties = { + "quotas": { + "type": "array", + "items": copy.deepcopy(neutron_schemas.QUOTA_SCHEMA), + } + } + elif name == "QuotasDetailsDetailsResponse": + schema.properties = { + "quota": copy.deepcopy(neutron_schemas.QUOTA_DETAILS_SCHEMA) + } + elif name in [ + "QuotaShowResponse", + "QuotaUpdateRequest", + "QuotaUpdateResponse", + "QuotasDefaultDefaultResponse", + "QuotasProjectProjectResponse", + ]: + schema.properties = { + "quota": copy.deepcopy(neutron_schemas.QUOTA_SCHEMA) + } + elif name.endswith("TagUpdateRequest") or name.endswith( + "TagUpdateResponse" + ): + # PUT tag does not have request body + return None + + # ... 
+ elif name in [ + # Routers + "RoutersAdd_Router_InterfaceAdd_Router_InterfaceRequest", + "RoutersAdd_Router_InterfaceAdd_Router_InterfaceResponse", + "RoutersRemove_Router_InterfaceRemove_Router_InterfaceRequest", + "RoutersRemove_Router_InterfaceRemove_Router_InterfaceResponse", + "RoutersAdd_ExtraroutesAdd_ExtraroutesRequest", + "RoutersAdd_ExtraroutesAdd_ExtraroutesResponse", + "RoutersRemove_ExtraroutesRemove_ExtraroutesRequest", + "RoutersRemove_ExtraroutesRemove_ExtraroutesResponse", + "RoutersAdd_External_GatewaysAdd_External_GatewaysRequest", + "RoutersAdd_External_GatewaysAdd_External_GatewaysResponse", + "RoutersUpdate_External_GatewaysUpdate_External_GatewaysRequest", + "RoutersUpdate_External_GatewaysUpdate_External_GatewaysResponse", + "RoutersRemove_External_GatewaysRemove_External_GatewaysRequest", + "RoutersRemove_External_GatewaysRemove_External_GatewaysResponse", + # L3 routers + "RoutersL3_AgentsIndexResponse", + "RoutersL3_AgentsCreateRequest", + "RoutersL3_AgentsCreateResponse", + "RoutersL3_AgentShowResponse", + "RoutersL3_AgentUpdateRequest", + "RoutersL3_AgentUpdateResponse" + # Subnet pool + "SubnetpoolsOnboard_Network_SubnetsOnboard_Network_SubnetsRequest", + "SubnetpoolsOnboard_Network_SubnetsOnboard_Network_SubnetsResponse", + ]: + logging.warning("TODO: provide schema description for %s", name) + + # And now basic CRUD operations, those take whichever info is available in Controller._attr_info + + elif operation in ["index", "show", "create", "update", "delete"]: + # Only CRUD operation are having request/response information avaiable + send_props = {} + return_props = {} + # Consume request name to required fields mapping + required_fields = neutron_schemas.REQUIRED_FIELDS_MAPPING.get( + name, [] + ) + for field, data in schema_def.items(): + js_schema = get_schema(data) + # Dirty hacks for corrupted schemas + if field in ["availability_zones", "tags"]: + js_schema.update( + {"type": "array", "items": {"type": "string"}} + ) + elif 
field == "revision_number": + js_schema.update({"type": "integer"}) + elif field == "subnets": + js_schema.update( + { + "type": "array", + "items": {"type": "string", "format": "uuid"}, + } + ) + elif field == "binding:vif_details": + js_schema.update({"type": "object"}) + if data.get(f"allow_{method.lower()}", False): + send_props[field] = js_schema + if data.get("is_visible", False): + return_props[field] = js_schema + if operation == "index" and collection_key: + schema.properties = { + collection_key: { + "type": "array", + "items": { + "type": "object", + "properties": ( + send_props + if name.endswith("Request") + else return_props + ), + }, + } + } + else: + if resource_key is not None: + schema.properties = { + resource_key: { + "type": "object", + "properties": ( + send_props + if name.endswith("Request") + else return_props + ), + } + } + if required_fields: + schema.properties[resource_key]["required"] = list( + required_fields + ) + else: + logging.warning("No Schema information for %s" % name) + + return f"#/components/schemas/{name}" + + +def get_schema(param_data): + """Convert Neutron API definition into json schema""" + schema: dict[str, Any] = {} + validate = param_data.get("validate") + convert_to = param_data.get("convert_to") + typ_ = "string" + if convert_to: + if callable(convert_to): + fname = convert_to.__name__ + if fname == "convert_to_boolean": + typ_ = "boolean" + elif fname == "convert_to_int": + typ_ = "integer" + + if validate: + if "type:uuid" in validate: + schema = {"type": "string", "format": "uuid"} + elif "type:uuid_or_none" in validate: + schema = {"type": ["string", "null"], "format": "uuid"} + elif "type:uuid_list" in validate: + schema = { + "type": "array", + "items": {"type": "string", "format": "uuid"}, + } + elif "type:string" in validate: + length = validate.get("type:string") + schema = {"type": "string"} + if length: + schema["maxLength"] = length + elif "type:string_or_none" in validate: + length = 
def get_schema(param_data):
    """Convert a Neutron API attribute definition into a JSON schema.

    Neutron describes resource attributes with ``validate`` rules (keys of
    the form ``type:<rule>``), an optional ``convert_to`` callable, and an
    optional ``default``; this function maps the rules that appear in the
    API definitions to JSON schema fragments.

    :param param_data: one attribute definition dict (keys such as
        ``validate``, ``convert_to``, ``default``).
    :returns: a JSON schema dict; falls back to ``{"type": "string"}``
        when nothing more specific can be derived.
    :raises RuntimeError: when a ``validate`` rule is not handled.
    """
    schema: dict[str, Any] = {}
    validate = param_data.get("validate")
    convert_to = param_data.get("convert_to")
    # Base type used by the "type:values" enum rule; refined from the name
    # of the conversion callable when one is present.
    typ_ = "string"
    if convert_to and callable(convert_to):
        fname = convert_to.__name__
        if fname == "convert_to_boolean":
            typ_ = "boolean"
        elif fname == "convert_to_int":
            typ_ = "integer"

    if validate:
        if "type:uuid" in validate:
            schema = {"type": "string", "format": "uuid"}
        elif "type:uuid_or_none" in validate:
            schema = {"type": ["string", "null"], "format": "uuid"}
        elif "type:uuid_list" in validate:
            schema = {
                "type": "array",
                "items": {"type": "string", "format": "uuid"},
            }
        elif "type:string" in validate:
            length = validate.get("type:string")
            schema = {"type": "string"}
            if length:
                schema["maxLength"] = length
        elif "type:string_or_none" in validate:
            length = validate.get("type:string_or_none")
            schema = {"type": ["string", "null"]}
            if length:
                schema["maxLength"] = length
        elif "type:list_of_unique_strings" in validate:
            length = validate.get("type:list_of_unique_strings")
            schema = {
                "type": "array",
                "items": {"type": "string"},
                "uniqueItems": True,
            }
            if length:
                schema["items"]["maxLength"] = length
        elif "type:dict_or_none" in validate:
            schema = {"type": ["object", "null"]}
        elif "type:mac_address" in validate:
            schema = {"type": "string"}
        elif "type:dns_host_name" in validate:
            length = validate.get("type:dns_host_name")
            schema = {"type": "string", "format": "hostname"}
            if length:
                schema["maxLength"] = length
        elif "type:values" in validate:
            schema = {"type": typ_, "enum": list(validate["type:values"])}
        elif "type:range" in validate:
            r = validate["type:range"]
            schema = {"type": "number", "minimum": r[0], "maximum": r[1]}
        elif "type:range_or_none" in validate:
            r = validate["type:range_or_none"]
            schema = {
                "type": ["number", "null"],
                "minimum": r[0],
                "maximum": r[1],
            }
        elif "type:port_range" in validate:
            r = validate["type:port_range"]
            schema = {"type": "number", "minimum": r[0], "maximum": r[1]}
        elif "type:external_gw_info" in validate:
            schema = {
                "type": "object",
                "properties": {
                    "network_id": {"type": "string", "format": "uuid"},
                    "enable_snat": {"type": "boolean"},
                    "external_fixed_ips": {
                        "type": "array",
                        "items": {
                            "type": "object",
                            "properties": {
                                "ip_address": {"type": "string"},
                                "subnet_id": {
                                    "type": "string",
                                    "format": "uuid",
                                },
                            },
                        },
                    },
                },
                "required": ["network_id"],
            }
        elif "type:availability_zone_hint_list" in validate:
            schema = {"type": "array", "items": {"type": "string"}}
        elif "type:hostroutes" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "destination": {"type": "string"},
                        "nexthop": {"type": "string"},
                    },
                },
            }
        elif "type:network_segments" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "provider:segmentation_id": {"type": "integer"},
                        "provider:physical_network": {"type": "string"},
                        "provider:network_type": {"type": "string"},
                    },
                },
            }
        elif "type:non_negative" in validate:
            schema = {"type": "integer", "minimum": 0}
        elif "type:dns_domain_name" in validate:
            length = validate.get("type:dns_domain_name")
            schema = {"type": "string", "format": "hostname"}
            if length:
                schema["maxLength"] = length
        elif "type:fixed_ips" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "ip_address": {
                            "type": "string",
                            "description": "IP Address",
                        },
                        "subnet_id": {
                            "type": "string",
                            "description": "The subnet ID from which the IP address is assigned",
                        },
                    },
                },
            }
        elif "type:allowed_address_pairs" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "ip_address": {"type": "string"},
                        "max_address": {"type": "string"},
                    },
                },
            }
        elif "type:list_of_any_key_specs_or_none" in validate:
            logging.warning(
                "TODO: Implement type:list_of_any_key_specs_or_none"
            )
            schema = {
                "type": "array",
                "items": {
                    "type": "object",
                    # NOTE(review): was "extraProperties", which is not a
                    # JSON Schema keyword; "additionalProperties" is.
                    "additionalProperties": True,
                },
                "x-openstack": {"todo": "implementme"},
            }
        elif "type:subnet_list" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "string",
                    "format": "uuid",
                },
            }
        elif "type:service_plugin_type" in validate:
            schema = {
                "type": "string",
            }
        elif "type:ip_address" in validate:
            schema = {
                "type": "string",
            }
        elif "type:ip_address_or_none" in validate:
            schema = {
                "type": "string",
            }
        elif "type:subnet_or_none" in validate:
            schema = {"type": ["string", "null"]}
        elif "type:fip_dns_host_name" in validate:
            length = validate.get("type:fip_dns_host_name")
            schema = {"type": "string"}
            if length:
                schema["maxLength"] = length
        elif "type:name_not_default" in validate:
            length = validate.get("type:name_not_default")
            schema = {"type": "string"}
            if length:
                schema["maxLength"] = length
        elif "type:not_empty_string" in validate:
            length = validate.get("type:not_empty_string")
            schema = {"type": "string"}
            if length:
                schema["maxLength"] = length
        elif "type:subnetpool_id_or_none" in validate:
            schema = {"type": ["string", "null"]}
        elif "type:ip_pools" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "object",
                    "properties": {
                        "start": {"type": "string"},
                        "end": {"type": "string"},
                    },
                },
            }
        elif "type:nameservers" in validate:
            schema = {
                "type": "array",
                "items": {
                    "type": "string",
                },
            }
        elif "type:list_of_subnet_service_types" in validate:
            schema = {
                "type": "array",
                "description": "The service types associated with the subnet",
                "items": {
                    "type": "string",
                },
            }
        elif "type:dict_or_nodata" in validate:
            schema = get_schema(validate["type:dict_or_nodata"])
        elif "type:dict_or_empty" in validate:
            schema = get_schema(validate["type:dict_or_empty"])
        elif "type:list_of_subnets_or_none" in validate:
            schema = {"type": "array", "items": {"type": "string"}}
        else:
            # Fail loudly on rules we have not mapped yet instead of
            # silently emitting a wrong schema.  (An unreachable
            # `schema = {"type": "string"}` used to follow this raise and
            # has been removed.)
            raise RuntimeError(
                "Unsupported type %s in %s" % (validate, param_data)
            )
    elif convert_to:
        # Nice way to get type of the field, isn't it?
        if not callable(convert_to):
            # Guard against non-callable converters instead of crashing on
            # the __name__ access below.
            logging.warning("Unsupported conversion %s used", convert_to)
        elif convert_to.__name__ == "convert_to_boolean":
            schema = {"type": ["string", "boolean"]}
        elif convert_to.__name__ == "convert_to_boolean_if_not_none":
            schema = {"type": ["string", "boolean", "null"]}
        elif convert_to.__name__ == "convert_to_int":
            schema = {"type": ["string", "integer"]}
        elif convert_to.__name__ == "convert_to_int_if_not_none":
            schema = {"type": ["string", "integer", "null"]}
        else:
            logging.warning(
                "Unsupported conversion function %s used", convert_to.__name__
            )

    if not schema:
        default = param_data.get("default")
        if default is not None:
            if isinstance(default, list):
                schema = {"type": "array", "items": {"type": "string"}}
        if not schema:
            schema = {"type": "string"}

    return schema
+""" +REQUIRED_FIELDS_MAPPING = { + "SubnetsCreateRequest": ["network_id", "ip_version"], + "FloatingipsCreateRequest": ["floating_network_id"], + "FloatingipsUpdateRequest": ["port_id"], +} + +EXTENSION_SCHEMA = { + "type": "object", + "properties": { + "alias": { + "type": "string", + "description": "A short name by which this extension is also known.", + }, + "description": { + "type": "string", + "description": "Text describing this extension’s purpose.", + }, + "name": {"type": "string", "description": "Name of the extension."}, + "namespace": { + "type": "string", + "description": "A URL pointing to the namespace for this extension.", + }, + "updated": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + }, +} + +QUOTA_SCHEMA = { + "type": "object", + "properties": { + "floatingip": { + "type": "integer", + "description": "The number of floating IP addresses allowed for each project. A value of -1 means no limit.", + }, + "network": { + "type": "integer", + "description": "The number of networks allowed for each project. A value of -1 means no limit.", + }, + "port": { + "type": "integer", + "description": "The number of ports allowed for each project. A value of -1 means no limit.", + }, + "rbac_policy": { + "type": "integer", + "description": "The number of role-based access control (RBAC) policies for each project. A value of -1 means no limit.", + }, + "router": { + "type": "integer", + "description": "The number of routers allowed for each project. A value of -1 means no limit.", + }, + "security_group": { + "type": "integer", + "description": "The number of security groups allowed for each project. A value of -1 means no limit.", + }, + "security_group_rule": { + "type": "integer", + "description": "The number of security group rules allowed for each project. 
A value of -1 means no limit.", + }, + "subnet": { + "type": "integer", + "description": "The number of subnets allowed for each project. A value of -1 means no limit.", + }, + "subnetpool": { + "type": "integer", + "description": "The number of subnet pools allowed for each project. A value of -1 means no limit.", + }, + "project_id": { + "type": "string", + "format": "uuid", + "description": "The ID of the project.", + }, + }, +} + + +QUOTA_DETAIL_SCHEMA = { + "type": "object", + "properties": { + "used": {"type": "integer", "description": "Used quota"}, + "limit": {"type": "integer", "description": "Current quota limit"}, + "reserved": {"type": "integer", "description": "Reserved quota"}, + }, +} + + +QUOTA_DETAILS_SCHEMA = { + "type": "object", + "properties": { + "floatingip": { + "description": "The number of floating IP addresses allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "network": { + "description": "The number of networks allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "port": { + "description": "The number of ports allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "rbac_policy": { + "description": "The number of role-based access control (RBAC) policies for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "router": { + "description": "The number of routers allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "security_group": { + "description": "The number of security groups allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "security_group_rule": { + "description": "The number of security group rules allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "subnet": { + "description": "The number of subnets allowed for each project.", + **copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + "subnetpool": { + "description": "The number of subnet pools allowed for each project.", + 
**copy.deepcopy(QUOTA_DETAIL_SCHEMA), + }, + }, +} diff --git a/codegenerator/openapi/nova.py b/codegenerator/openapi/nova.py new file mode 100644 index 0000000..07d918d --- /dev/null +++ b/codegenerator/openapi/nova.py @@ -0,0 +1,662 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +from multiprocessing import Process +from pathlib import Path + +from ruamel.yaml.scalarstring import LiteralScalarString + +from codegenerator.common.schema import ( + SpecSchema, + TypeSchema, + ParameterSchema, + HeaderSchema, +) +from codegenerator.openapi.base import OpenStackServerSourceBase +from codegenerator.openapi import nova_schemas +from codegenerator.openapi.utils import merge_api_ref_doc + + +class NovaGenerator(OpenStackServerSourceBase): + URL_TAG_MAP = { + "/versions": "version", + "/os-quota-sets": "quota-sets-os-quota-sets", + "/os-quota-class-sets": "quota-class-sets-os-quota-class-sets", + "/os-console-auth-tokens/": "server-consoles", + "/servers/{server_id}/remote-consoles": "server-consoles", + "/servers/{server_id}/migrations": "server-migrations", + "/servers/{server_id}/tags": "server-tags", + } + + def _api_ver_major(self, ver): + return ver.ver_major + + def _api_ver_minor(self, ver): + return ver.ver_minor + + def _api_ver(self, ver): + return (ver.ver_major, ver.ver_minor) + + def _generate(self, target_dir, args): + from nova.api.openstack import api_version_request + from nova.api.openstack.compute import routes + from nova.tests import fixtures as 
nova_fixtures + + self.api_version = api_version_request._MAX_API_VERSION + self.min_api_version = api_version_request._MIN_API_VERSION + + self.useFixture(nova_fixtures.RPCFixture("nova.test")) + self.router = routes.APIRouterV21() + + work_dir = Path(target_dir) + work_dir.mkdir(parents=True, exist_ok=True) + + impl_path = Path(work_dir, "openapi_specs", "compute", "v2.yaml") + impl_path.parent.mkdir(parents=True, exist_ok=True) + + openapi_spec = self.load_openapi(impl_path) + if not openapi_spec: + openapi_spec = SpecSchema( + info=dict( + title="OpenStack Compute API", + description=LiteralScalarString( + "Compute API provided by Nova service" + ), + version=self.api_version, + ), + openapi="3.1.0", + security=[{"ApiKeyAuth": []}], + components=dict( + securitySchemes={ + "ApiKeyAuth": { + "type": "apiKey", + "in": "header", + "name": "X-Auth-Token", + } + }, + ), + ) + + for route in self.router.map.matchlist: + if route.routepath.startswith("/{project"): + continue + self._process_route(route, openapi_spec, ver_prefix="/v2.1") + + self._sanitize_param_ver_info(openapi_spec, self.min_api_version) + + if args.api_ref_src: + merge_api_ref_doc( + openapi_spec, + args.api_ref_src, + allow_strip_version=False, + doc_url_prefix="/v2.1", + ) + + self.dump_openapi(openapi_spec, impl_path, args.validate) + + return impl_path + + def generate(self, target_dir, args): + proc = Process(target=self._generate, args=[target_dir, args]) + proc.start() + proc.join() + if proc.exitcode != 0: + raise RuntimeError("Error generating Compute OpenAPI schema") + return Path(target_dir, "openapi_specs", "compute", "v2.yaml") + + def _get_param_ref( + self, + openapi_spec, + ref_name: str, + param_name: str, + param_location: str, + path: str | None = None, + **param_attrs, + ): + if ref_name == "os_instance_usage_audit_log_id": + openapi_spec.components.parameters[ref_name] = ParameterSchema( + location="path", + name="id", + type_schema=TypeSchema(type="string", format="date-time"), 
+ description="Filters the response by the date and time before which to list usage audits.", + required=True, + ) + ref = f"#/components/parameters/{ref_name}" + else: + ref = super()._get_param_ref( + openapi_spec, + ref_name, + param_name=param_name, + param_location=param_location, + path=path, + **param_attrs, + ) + + return ref + + def _get_schema_ref( + self, + openapi_spec, + name, + description=None, + schema_def=None, + action_name=None, + ): + from nova.api.openstack.compute.schemas import flavor_manage + + schema = None + mime_type: str = "application/json" + # NOTE(gtema): This must go away once scemas are merged directly to + # Nova + # /servers + if name == "ServersCreateResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_CREATED_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + + elif name == "ServersListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "ServersDetailResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_LIST_DETAIL_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in ["ServerShowResponse", "ServerUpdateResponse"]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /servers/{id}/action + elif name in [ + "ServersActionRevertresizeResponse", + "ServersActionRebootResponse", + "ServersActionResizeResponse", + "ServersActionRebuildResponse", + "ServersActionOs-StartResponse", + "ServersActionOs-StopResponse", + "ServersActionTrigger_Crash_DumpResponse", + "ServersActionInjectnetworkinfoResponse", + "ServersActionOs-ResetstateResponse", + "ServersActionChangepasswordResponse", + "ServersActionRestoreResponse", + "ServersActionForcedeleteResponse", + "ServersActionLockResponse", + 
"ServersActionUnlockResponse", + "ServersActionMigrateResponse", + "ServersActionOs-MigrateliveResponse", + "ServersActionPauseResponse", + "ServersActionUnpauseResponse", + "ServersActionUnrescueResponse", + "ServersActionAddsecuritygroupResponse", + "ServersActionRemovesecuritygroupResponse", + "ServersActionShelveResponse", + "ServersActionShelveoffloadResponse", + "ServersActionUnshelveResponse", + "ServersActionSuspendResponse", + "ServersActionResumeResponse", + "ServersActionResetnetworkResponse", + "ServersActionAddfloatingipResponse", + "ServersActionRemovefloatingipResponse", + "ServersActionAddfixedipResponse", + "ServersActionRemovefixedipResponse", + ]: + return (None, None) + elif name in [ + "ServersActionCreateimageResponse", + "ServersActionCreatebackupResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **nova_schemas.SERVER_ACTION_CREATE_IMAGE_RESPONSE_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "ServersActionEvacuateResponse", + "ServersActionRescueResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.SERVER_ACTION_NEW_ADMINPASS_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "ServersActionOs-GetconsoleoutputResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **nova_schemas.SERVER_ACTION_GET_CONSOLE_OUTPUT_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "ServersActionOs-GetvncconsoleResponse", + "ServersActionOs-GetspiceconsoleResponse", + "ServersActionOs-GetrdpconsoleResponse", + "ServersActionOs-GetserialconsoleResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.SERVER_ACTION_REMOTE_CONSOLE_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + # /server/id/diagnostics + elif name == "ServersDiagnosticsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, 
TypeSchema(**nova_schemas.SERVER_DIAGNOSTICS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /server/id/ips + elif name == "ServersIpsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.SERVER_ADDRESSES_CONTAINER_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "ServersIpShowResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + maxProperties=1, **nova_schemas.SERVER_ADDRESSES_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + # /servers/id/metadata + elif name in [ + "ServersMetadataListResponse", + "ServersMetadataCreateResponse", + "ServersMetadataUpdate_AllResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_METADATA_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in ["ServersMetadataShowResponse", "ServersMetadataUpdate"]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_METADATA_ITEM_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /server/id/os-instance-actions + elif name == "ServersOs_Instance_ActionsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.SERVER_INSTANCE_ACTION_LIST_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name == "ServersOs_Instance_ActionShowResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **nova_schemas.SERVER_INSTANCE_ACTION_CONTAINER_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + # /server/id/os-interface-attachment + elif name == "ServersOs_InterfaceListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.INTERFACE_ATTACHMENT_LIST_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "ServersOs_InterfaceCreateResponse", + "ServersOs_InterfaceShowResponse", + ]: + schema = 
openapi_spec.components.schemas.setdefault( + name, + TypeSchema( + **nova_schemas.INTERFACE_ATTACHMENT_CONTAINER_SCHEMA + ), + ) + ref = f"#/components/schemas/{name}" + # /server/id/os-server-password + elif name == "ServersOs_Server_PasswordListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_PASSWORD_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /server/id/os-volume_attachments + elif name == "ServersOs_Volume_AttachmentsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.VOLUME_ATTACHMENT_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "ServersOs_Volume_AttachmentsCreateResponse", + "ServersOs_Volume_AttachmentShowResponse", + "ServersOs_Volume_AttachmentUpdateResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.VOLUME_ATTACHMENT_CONTAINER_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + + # /flavors/... 
+ elif name == "FlavorsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.FLAVORS_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "FlavorsDetailResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.FLAVORS_LIST_DETAIL_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "FlavorsCreateResponse", + "FlavorShowResponse", + "FlavorUpdateResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.FLAVOR_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "FlavorUpdateRequest": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**flavor_manage.update_v2_55) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "FlavorsOs_Flavor_AccessListResponse", + "FlavorsActionAddtenantaccessResponse", + "FlavorsActionRemovetenantaccessResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.FLAVOR_ACCESSES_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "FlavorsOs_Extra_SpecsListResponse", + "FlavorsOs_Extra_SpecsCreateResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.FLAVOR_EXTRA_SPECS_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "FlavorsOs_Extra_SpecShowResponse", + "FlavorsOs_Extra_SpecUpdateResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.FLAVOR_EXTRA_SPEC_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /limits + elif name == "LimitsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.LIMITS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-aggregates + elif name == "Os_AggregatesListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, 
TypeSchema(**nova_schemas.AGGREGATE_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "Os_AggregatesCreateResponse", + "Os_AggregateShowResponse", + "Os_AggregateUpdateResponse", + "Os_AggregatesActionAdd_HostResponse", + "Os_AggregatesActionRemove_HostResponse", + "Os_AggregatesActionSet_MetadataResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.AGGREGATE_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "Os_AggregatesImagesResponse": + return (None, None) + # /os-assisted-volume-snapshots + elif name == "Os_Assisted_Volume_SnapshotsCreateResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.VOLUME_SNAPSHOT_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-assisted-volume-snapshots + elif name == "Os_Assisted_Volume_SnapshotsCreateResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.VOLUME_SNAPSHOT_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-availability-zone + elif name == "Os_Availability_ZoneListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.AZ_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "Os_Availability_ZoneDetailResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.AZ_LIST_DETAIL_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-console-auth-tokens/{id} + elif name == "Os_Console_Auth_TokenShowResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.CONSOLE_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /servers/{id}/remote-console + elif name == "ServersRemote_ConsolesCreateResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.REMOTE_CONSOLE_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-hypervisors + 
elif name == "Os_HypervisorsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.HYPERVISOR_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "Os_HypervisorsDetailResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.HYPERVISOR_LIST_DETAIL_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "Os_HypervisorShowResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.HYPERVISOR_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-instance_usage_audit_log + elif name in [ + "Os_Instance_Usage_Audit_LogListResponse", + "Os_Instance_Usage_Audit_LogShowResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.INSTANCE_USAGE_AUDIT_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-keypairs + elif name == "Os_KeypairsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.KEYPAIR_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "Os_KeypairShowResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.KEYPAIR_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "Os_KeypairsCreateResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.KEYPAIR_CREATED_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-migrations + elif name == "Os_MigrationsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.MIGRATION_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /servers/{server_id}/migrations + elif name == "ServersMigrationsActionForce_CompleteResponse": + return (None, None) + elif name == "ServersMigrationsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, 
                TypeSchema(**nova_schemas.SERVER_MIGRATION_LIST_SCHEMA)
            )
            ref = f"#/components/schemas/{name}"
        elif name == "ServersMigrationShowResponse":
            schema = openapi_spec.components.schemas.setdefault(
                name,
                TypeSchema(**nova_schemas.SERVER_MIGRATION_CONTAINER_SCHEMA),
            )
            ref = f"#/components/schemas/{name}"
        # /os-quota
        elif name in [
            "Os_Quota_SetShowResponse",
            "Os_Quota_SetUpdateResponse",
            "Os_Quota_SetsDefaultsResponse",
        ]:
            schema = openapi_spec.components.schemas.setdefault(
                name, TypeSchema(**nova_schemas.QUOTA_SET_CONTAINER_SCHEMA)
            )
            ref = f"#/components/schemas/{name}"
        elif name == "Os_Quota_SetsDetailResponse":
            schema = openapi_spec.components.schemas.setdefault(
                name,
                TypeSchema(**nova_schemas.QUOTA_SET_DETAIL_CONTAINER_SCHEMA),
            )
            ref = f"#/components/schemas/{name}"
        elif name in [
            "Os_Quota_Class_SetShowResponse",
            # NOTE(review): unlike every sibling entry this name carries no
            # "Response"/"Request" suffix — confirm it matches the name the
            # generator actually produces (cf. "Os_Quota_SetUpdateResponse").
            "Os_Quota_Class_SetUpdate",
        ]:
            schema = openapi_spec.components.schemas.setdefault(
                name,
                TypeSchema(**nova_schemas.QUOTA_CLASS_SET_CONTAINER_SCHEMA),
            )
            ref = f"#/components/schemas/{name}"
        # /os-external-events
        elif name == "Os_Server_External_EventsCreateResponse":
            schema = openapi_spec.components.schemas.setdefault(
                name, TypeSchema(**nova_schemas.EXTERNAL_EVENTS_SCHEMA)
            )
            ref = f"#/components/schemas/{name}"
        # /os-server-groups
        elif name == "Os_Server_GroupsListResponse":
            schema = openapi_spec.components.schemas.setdefault(
                name, TypeSchema(**nova_schemas.SERVER_GROUP_LIST_SCHEMA)
            )
            ref = f"#/components/schemas/{name}"
        elif name in [
            "Os_Server_GroupsCreateResponse",
            "Os_Server_GroupShowResponse",
        ]:
            schema = openapi_spec.components.schemas.setdefault(
                name, TypeSchema(**nova_schemas.SERVER_GROUP_CONTAINER_SCHEMA)
            )
            ref = f"#/components/schemas/{name}"
        # /os-services
        elif name == "Os_ServicesListResponse":
            schema = openapi_spec.components.schemas.setdefault(
                name, TypeSchema(**nova_schemas.SERVICE_LIST_SCHEMA)
            )
            ref =
f"#/components/schemas/{name}" + elif name in [ + "Os_ServiceUpdateResponse", + "Os_Server_GroupShowResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVICE_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + # /os-simple-tenant-usage + elif name in [ + "Os_Simple_Tenant_UsageListResponse", + "Os_Simple_Tenant_UsageShowResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.TENANT_USAGE_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + + # Server Topology + elif name == "ServersTopologyListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_TOPOLOGY_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "ServersOs_Security_GroupsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, + TypeSchema(**nova_schemas.SERVER_SECURITY_GROUPS_LIST_SCHEMA), + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "ServersTagsListResponse", + "ServersTagsUpdate_All", + "ServersTagsUpdate_AllResponse", + ]: + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.SERVER_TAGS_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + + # Compute extensions + elif name == "ExtensionsListResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.EXTENSION_LIST_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name == "ExtensionShowResponse": + schema = openapi_spec.components.schemas.setdefault( + name, TypeSchema(**nova_schemas.EXTENSION_CONTAINER_SCHEMA) + ) + ref = f"#/components/schemas/{name}" + elif name in [ + "ServersTagGetResponse", + "ServersTagUpdateRequest", + "ServersTagUpdateResponse", + ]: + # Operations without body + return (None, None) + else: + (ref, mime_type) = super()._get_schema_ref( + openapi_spec, name, description, action_name=action_name + ) + if action_name and 
schema: + if not schema.openstack: + schema.openstack = {} + schema.openstack.setdefault("action-name", action_name) + + if schema: + print(schema.model_dump()) + return (ref, mime_type) + + def _post_process_operation_hook( + self, openapi_spec, operation_spec, path: str | None = None + ): + """Hook to allow service specific generator to modify details""" + if operation_spec.operationId == "servers/id/action:post": + # Sereral server actions may return Location header + operation_spec.responses.setdefault( + "202", {"description": "Accepted"} + ) + headers_202 = operation_spec.responses["202"].setdefault( + "headers", {} + ) + headers_202.setdefault( + "Location", + HeaderSchema( + description='The image location URL of the image or backup created, HTTP header "Location: <image location URL>" will be returned. May be returned only in response of `createBackup` and `createImage` actions.', + schema=TypeSchema(type="string"), + openstack={"max-ver": "2.44"}, + ), + ) + super()._post_process_operation_hook(openapi_spec, operation_spec) diff --git a/codegenerator/openapi/nova_schemas.py b/codegenerator/openapi/nova_schemas.py new file mode 100644 index 0000000..2963085 --- /dev/null +++ b/codegenerator/openapi/nova_schemas.py @@ -0,0 +1,2378 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import copy +from typing import Any + +from nova.api.openstack.compute.schemas import flavors_extraspecs +from nova.api.openstack.compute.schemas import quota_sets +from nova.api.openstack.compute.schemas import remote_consoles +from nova.api.validation import parameter_types + +# NOTE(gtema): This is a temporary location for schemas not currently defined +# in Nova. Once everything is stabilized those must be moved directly to Nova + +LINK_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Links to the resources in question. See [API Guide / Links and References](https://docs.openstack.org/api-guide/compute/links_and_references.html) for more info.", + "properties": { + "href": {"type": "string", "format": "uri"}, + "rel": {"type": "string"}, + }, +} + +LINKS_SCHEMA: dict[str, Any] = { + "type": "array", + "description": "Links to the resources in question. See [API Guide / Links and References](https://docs.openstack.org/api-guide/compute/links_and_references.html) for more info.", + "items": copy.deepcopy(LINK_SCHEMA), +} + + +SERVER_TAGS_SCHEMA: dict[str, Any] = { + "description": "Server Tags", + "type": "object", + "properties": { + "tags": { + "type": "array", + "description": "A list of tags. 
The maximum count of tags in this list is 50.", + "items": { + "type": "string", + }, + } + }, +} + +SERVER_TOPOLOGY_SCHEMA: dict[str, Any] = { + "description": "NUMA topology information for a server", + "type": "object", + "properties": { + "nodes": { + "description": "NUMA nodes information of a server", + "type": "array", + "items": { + "type": "object", + "description": "NUMA node information of a server", + "properties": { + "cpu_pinning": { + "type": "object", + "description": "The mapping of server cores to host physical CPU", + "additionalProperties": { + "type": "integer", + }, + }, + "vcpu_set": { + "type": "array", + "description": "A list of IDs of the virtual CPU assigned to this NUMA node.", + "items": {"type": "integer"}, + }, + "siblings": { + "type": "array", + "description": "A mapping of host cpus thread sibling.", + "items": {"type": "integer"}, + }, + "memory_mb": { + "type": "integer", + "description": "The amount of memory assigned to this NUMA node in MB.", + }, + "host_node": { + "type": "integer", + "description": "The host NUMA node the virtual NUMA node is map to.", + }, + "pagesize_kb": { + "type": ["integer", "null"], + "description": "The page size in KB of a server. This field is null if the page size information is not available.", + }, + }, + }, + } + }, +} + +FLAVOR_EXTRA_SPEC_SCHEMA: dict[str, Any] = { + "minProperties": 1, + "maxProperties": 1, + "examples": {"JSON Request": {"hw:numa_nodes": "1"}}, + **flavors_extraspecs.metadata, +} + +FLAVOR_EXTRA_SPECS_SCHEMA: dict[str, Any] = flavors_extraspecs.metadata +FLAVOR_EXTRA_SPECS_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A dictionary of the flavor’s extra-specs key-and-value pairs. 
It appears in the os-extra-specs’ “create” REQUEST body, as well as the os-extra-specs’ “create” and “list” RESPONSE body.", + "properties": {"extra_specs": flavors_extraspecs.metadata}, +} + +FLAVOR_SHORT_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid"}, + "name": {"type": "string"}, + "description": {"type": "string"}, + "links": LINKS_SCHEMA, + }, +} +FLAVOR_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The display name of a flavor.", + }, + "id": { + "type": "string", + "description": "The ID of the flavor. While people often make this look like an int, this is really a string.", + "minLength": 1, + "maxLength": 255, + "pattern": "^(?! )[a-zA-Z0-9. _-]+(?<! )$", + }, + "ram": { + "description": "The amount of RAM a flavor has, in MiB.", + **parameter_types.flavor_param_positive, + }, + "vcpus": { + "description": "The number of virtual CPUs that will be allocated to the server.", + **parameter_types.flavor_param_positive, + }, + "disk": { + "description": "The size of the root disk that will be created in GiB. If 0 the root disk will be set to exactly the size of the image used to deploy the instance. However, in this case the scheduler cannot select the compute host based on the virtual image size. Therefore, 0 should only be used for volume booted instances or for testing purposes. Volume-backed instances can be enforced for flavors with zero root disk via the os_compute_api:servers:create:zero_disk_flavor policy rule.", + **parameter_types.flavor_param_non_negative, + }, + "OS-FLV-EXT-DATA:ephemeral": { + "description": "The size of the ephemeral disk that will be created, in GiB. Ephemeral disks may be written over on server state changes. So should only be used as a scratch space for applications that are aware of its limitations. 
Defaults to 0.", + **parameter_types.flavor_param_non_negative, + }, + "swap": { + "description": "The size of a dedicated swap disk that will be allocated, in MiB. If 0 (the default), no dedicated swap disk will be created. Currently, the empty string (‘’) is used to represent 0. As of microversion 2.75 default return value of swap is 0 instead of empty string.", + **parameter_types.flavor_param_non_negative, + }, + "rxtx_factor": { + "description": "The receive / transmit factor (as a float) that will be set on ports if the network backend supports the QOS extension. Otherwise it will be ignored. It defaults to 1.0.", + "type": ["number", "string"], + "pattern": r"^[0-9]+(\.[0-9]+)?$", + "minimum": 0, + "exclusiveMinimum": True, + "maximum": 3.40282e38, + }, + "os-flavor-access:is_public": { + "description": "Whether the flavor is public (available to all projects) or scoped to a set of projects. Default is True if not specified.", + **parameter_types.boolean, + }, + "extra_specs": FLAVOR_EXTRA_SPECS_SCHEMA, + "links": LINKS_SCHEMA, + }, + "additionalProperties": False, +} + +FLAVOR_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Single flavor details", + "properties": {"flavor": copy.deepcopy(FLAVOR_SCHEMA)}, +} + +FLAVORS_LIST_SCHEMA: dict[str, Any] = { + "description": "Flavors list response", + "type": "object", + "properties": { + "flavors": { + "type": "array", + "items": copy.deepcopy(FLAVOR_SHORT_SCHEMA), + } + }, +} + +FLAVORS_LIST_DETAIL_SCHEMA: dict[str, Any] = { + "description": "Detailed flavors list response", + "type": "object", + "properties": { + "flavors": {"type": "array", "items": copy.deepcopy(FLAVOR_SCHEMA)} + }, +} + +FLAVOR_ACCESS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "flavor_id": {"type": "string", "format": "uuid"}, + "tenant_id": {"type": "string", "format": "uuid"}, + }, +} +FLAVOR_ACCESSES_SCHEMA: dict[str, Any] = { + "description": "A list of objects, each with the keys 
flavor_id and tenant_id.", + "type": "object", + "properties": { + "flavor_access": { + "type": "array", + "items": copy.deepcopy(FLAVOR_ACCESS_SCHEMA), + } + }, +} + +LIMITS_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Data structure that contains both absolute limits within a deployment.", + "properties": { + "absolute": { + "type": "object", + "properties": { + "maxServerGroupMembers": { + "type": "integer", + "description": "The number of allowed members for each server group.", + }, + "maxServerGroups": { + "type": "integer", + "description": "The number of allowed server groups for each tenant.", + }, + "maxServerMetamaxServerMeta": { + "type": "integer", + "description": "The number of allowed metadata items for each server.", + }, + "maxTotalCores": { + "type": "integer", + "description": "The number of allowed server cores for each tenant.", + }, + "maxTotalInstances": { + "type": "integer", + "description": "The number of allowed servers for each tenant.", + }, + "maxTotalKeypairs": { + "type": "integer", + "description": "The number of allowed key pairs for each user.", + }, + "maxTotalRAMSize": { + "type": "integer", + "description": "The amount of allowed server RAM, in MiB, for each tenant.", + }, + "totalCoresUsed": { + "type": "integer", + "description": "The number of used server cores in each tenant. If reserved query parameter is specified and it is not 0, the number of reserved server cores are also included.", + }, + "totalInstancesUsed": { + "type": "integer", + "description": "The number of servers in each tenant. If reserved query parameter is specified and it is not 0, the number of reserved servers are also included.", + }, + "totalRAMUsed": { + "type": "integer", + "description": "The amount of used server RAM in each tenant. 
If reserved query parameter is specified and it is not 0, the amount of reserved server RAM is also included.", + }, + "totalServerGroupsUsed": { + "type": "integer", + "description": "The number of used server groups in each tenant. If reserved query parameter is specified and it is not 0, the number of reserved server groups are also included.", + }, + }, + "additionalProperties": {"type": "integer"}, + }, + }, +} + +AGGREGATE_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The host aggregate object", + "properties": { + "availability_zone": { + "type": "string", + "description": "The availability zone of the host aggregate.", + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was created.", + }, + "deleted": { + "type": "boolean", + "description": "A boolean indicates whether this aggregate is deleted or not, if it has not been deleted, false will appear.", + }, + "deleted_at": { + "type": ["string", "null"], + "format": "date-time", + "description": "The date and time when the resource was deleted. If the resource has not been deleted yet, this field will be null.", + }, + "id": { + "type": "integer", + "description": "The ID of the host aggregate.", + }, + "metadata": parameter_types.metadata, + "hosts": { + "type": "array", + "description": "A list of host ids in this aggregate.", + "items": {"type": "string"}, + }, + "updated_at": { + "type": ["string", "null"], + "format": "date-time", + "description": "The date and time when the resource was updated, if the resource has not been updated, this field will show as null.", + }, + "uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the host aggregate. 
New in version 2.41", + "x-openstack": {"min-ver": "2.41"}, + }, + }, +} + +AGGREGATE_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Aggregate object.", + "properties": {"aggregate": copy.deepcopy(AGGREGATE_SCHEMA)}, +} + + +AGGREGATE_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The list of existing aggregates.", + "properties": { + "aggregates": { + "type": "array", + "description": "The list of existing aggregates.", + "items": copy.deepcopy(AGGREGATE_SCHEMA), + } + }, +} + +VOLUME_SNAPSHOT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A partial representation of a snapshot that is used to create a snapshot.", + "properties": { + "snapshot": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Its the same arbitrary string which was sent in request body.", + }, + "volumeId": { + "type": "string", + "format": "uuid", + "description": "The source volume ID.", + }, + }, + } + }, +} + +AZ_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "zoneName": { + "type": "string", + "description": "The availability zone name.", + }, + "zoneState": { + "type": "object", + "description": "The current state of the availability zone.", + "properties": { + "available": { + "type": "boolean", + "description": "Returns true if the availability zone is available.", + } + }, + }, + "hosts": {"type": "null", "description": "It is always null."}, + }, +} + +AZ_DETAIL_SCHEMA: dict[str, Any] = copy.deepcopy(AZ_SCHEMA) +AZ_DETAIL_SCHEMA["properties"]["hosts"] = { + "type": "object", + "description": "An object containing a list of host information. The host information is comprised of host and service objects. 
The service object returns three parameters representing the states of the service: active, available, and updated_at.", + "examples": { + "JSON request": { + "conductor": { + "nova-conductor": { + "active": True, + "available": True, + "updated_at": None, + } + }, + "scheduler": { + "nova-scheduler": { + "active": True, + "available": True, + "updated_at": None, + } + }, + } + }, +} + +AZ_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "availabilityZoneInfo": { + "type": "array", + "description": "The list of availability zone information.", + "items": copy.deepcopy(AZ_SCHEMA), + } + }, +} +AZ_LIST_DETAIL_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "availabilityZoneInfo": { + "type": "array", + "description": "The list of availability zone information.", + "items": copy.deepcopy(AZ_DETAIL_SCHEMA), + } + }, +} + +CONSOLE_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Show Console Connection Information Response", + "properties": { + "console": { + "type": "object", + "description": "The console object.", + "properties": { + "instance_uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server.", + }, + "host": { + "type": "string", + "description": "The name or ID of the host.", + }, + "port": {"type": "integer", "description": "The port number."}, + "internal_access_path": { + "type": "string", + "description": "The id representing the internal access path.", + }, + }, + "required": ["instance_uuid", "port"], + } + }, +} + +REMOTE_CONSOLE_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Create Console Response", + "properties": { + "remote_console": { + "type": "object", + "description": "The remote console object.", + "properties": { + "protocol": { + "type": "string", + "enum": remote_consoles.create_v28["properties"][ + "remote_console" + ]["properties"]["protocol"]["enum"], + "description": "The protocol of remote console. 
The valid values are vnc, spice, rdp, serial and mks. The protocol mks is added since Microversion 2.8.", + }, + "type": { + "type": "string", + "enum": remote_consoles.create_v28["properties"][ + "remote_console" + ]["properties"]["type"]["enum"], + "description": "The type of remote console. The valid values are novnc, rdp-html5, spice-html5, serial, and webmks. The type webmks is added since Microversion 2.8.", + }, + "url": { + "type": "string", + "format": "uri", + "description": "The URL is used to connect the console.", + }, + }, + } + }, +} + +HYPERVISOR_SHORT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The hypervisor object.", + "properties": { + "hypervisor_hostname": { + "type": "string", + "description": "The hypervisor host name provided by the Nova virt driver. For the Ironic driver, it is the Ironic node uuid.", + }, + "id": { + "type": "string", + "description": "The id of the hypervisor. From version 2.53 it is a string as UUID", + }, + "state": { + "type": "string", + "enum": ["up", "down"], + "description": "The state of the hypervisor.", + }, + "status": { + "type": "string", + "enum": ["disabled", "enabled"], + "description": "The status of the hypervisor.", + }, + "servers": { + "type": "array", + "description": "A list of server objects. This field has become mandatory in microversion 2.75. If no servers is on hypervisor then empty list is returned. New in version 2.53", + "x-openstack": {"min-ver": "2.53"}, + "items": { + "type": "object", + "properties": { + "uuid": { + "type": "string", + "format": "uuid", + "description": "The server ID.", + }, + "name": { + "type": "string", + "description": "The server name.", + }, + }, + }, + }, + }, +} + +HYPERVISOR_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The hypervisor object.", + "properties": { + "cpu_info": { + "type": "object", + "description": "A dictionary that contains cpu information like arch, model, vendor, features and topology. 
The content of this field is hypervisor specific.", + "additionalProperties": True, + "x-openstack": {"max-ver": "2.87"}, + }, + "current_workload": { + "type": "integer", + "description": "The current_workload is the number of tasks the hypervisor is responsible for. This will be equal or greater than the number of active VMs on the system (it can be greater when VMs are being deleted and the hypervisor is still cleaning up). Available until version 2.87", + "x-openstack": {"max-ver": "2.87"}, + }, + "disk_available_least": { + "type": "integer", + "description": "The actual free disk on this hypervisor(in GiB). If allocation ratios used for overcommit are configured, this may be negative. This is intentional as it provides insight into the amount by which the disk is overcommitted. Available until version 2.87", + "x-openstack": {"max-ver": "2.87"}, + }, + "host_ip": { + "type": "string", + "format": "ip", + "description": "The IP address of the hypervisor’s host.", + }, + "free_disk_gb": { + "type": "integer", + "description": "The free disk remaining on this hypervisor(in GiB). This does not take allocation ratios used for overcommit into account so this value may be negative. Available until version 2.87", + "x-openstack": {"max-ver": "2.87"}, + }, + "free_ram_mb": { + "type": "integer", + "description": "The free RAM in this hypervisor(in MiB). This does not take allocation ratios used for overcommit into account so this value may be negative. Available until version 2.87", + "x-openstack": {"max-ver": "2.87"}, + }, + "hypervisor_hostname": { + "type": "string", + "description": "The hypervisor host name provided by the Nova virt driver. 
For the Ironic driver, it is the Ironic node uuid.", + }, + "hypervisor_type": { + "type": "string", + "description": "The hypervisor type.", + }, + "hypervisor_version": { + "type": "integer", + "description": "The hypervisor version.", + }, + "local_gb": { + "type": "integer", + "x-openstack": { + "max-ver": "2.87", + "description": "The disk in this hypervisor (in GiB). This does not take allocation ratios used for overcommit into account so there may be disparity between this and the used count.", + }, + }, + "local_gb_used": { + "type": "integer", + "x-openstack": { + "max-ver": "2.87", + "description": "The disk used in this hypervisor (in GiB).", + }, + }, + "memory_mb": { + "type": "integer", + "x-openstack": { + "max-ver": "2.87", + "description": "The memory of this hypervisor (in MiB). This does not take allocation ratios used for overcommit into account so there may be disparity between this and the used count.", + }, + }, + "memory_mb_used": { + "type": "integer", + "x-openstack": { + "max-ver": "2.87", + "description": "The memory used in this hypervisor (in MiB).", + }, + }, + "running_vms": { + "type": "integer", + "x-openstack": { + "max-ver": "2.87", + "description": "The number of running VMs on this hypervisor. ", + }, + }, + "service": { + "type": "object", + "description": "The hypervisor service object.", + "properties": { + "host": { + "type": "string", + "description": "The name of the host.", + }, + "id": { + "type": ["integer", "string"], + "format": "uuid", + "description": "The id of the service.", + }, + "disabled_reason": { + "type": ["string", "null"], + "description": "The disable reason of the service, null if the service is enabled or disabled without reason provided.", + }, + }, + }, + "uptime": { + "type": "string", + "description": "The total uptime of the hypervisor and information about average load. 
Only reported for active hosts where the virt driver supports this feature.", + "x-openstack": {"min-ver": "2.87"}, + }, + "vcpus": { + "type": "integer", + "x-openstack": {"max-ver": "2.87"}, + "description": "The number of vCPU in this hypervisor. This does not take allocation ratios used for overcommit into account so there may be disparity between this and the used count.", + }, + "vcpus_used": { + "type": "integer", + "x-openstack": {"max-ver": "2.87"}, + "description": "The number of vCPU used in this hypervisor.", + }, + **HYPERVISOR_SHORT_SCHEMA["properties"], + }, +} + +HYPERVISOR_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"hypervisor": copy.deepcopy(HYPERVISOR_SCHEMA)}, +} + + +HYPERVISOR_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "hypervisors": { + "type": "array", + "description": "An array of hypervisor information.", + "items": copy.deepcopy(HYPERVISOR_SHORT_SCHEMA), + }, + "hypervisor_links": LINKS_SCHEMA, + }, +} + +HYPERVISOR_LIST_DETAIL_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "hypervisors": { + "type": "array", + "description": "An array of hypervisor information.", + "items": copy.deepcopy(HYPERVISOR_SCHEMA), + }, + "hypervisor_links": LINKS_SCHEMA, + }, +} + +INSTANCE_USAGE_AUDIT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The object of instance usage audit log information.", + "properties": { + "hosts_not_run": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of the hosts whose instance audit tasks have not run.", + }, + "log": { + "type": "object", + "description": "The object of instance usage audit logs.", + }, + "errors": {"type": "integer", "description": "The number of errors."}, + "instances": { + "type": "integer", + "description": "The number of instances.", + }, + "message": { + "type": "string", + "description": "The log message of the instance usage audit task.", + }, + "state": { + "type": 
"string", + "enum": ["DONE", "RUNNING"], + "description": "The state of the instance usage audit task. DONE or RUNNING.", + }, + "num_hosts": { + "type": "integer", + "description": "The number of the hosts.", + }, + "num_hosts_done": { + "type": "integer", + "description": "The number of the hosts whose instance audit tasks have been done.", + }, + "num_hosts_not_run": { + "type": "integer", + "description": "The number of the hosts whose instance audit tasks have not run.", + }, + "num_hosts_running": { + "type": "integer", + "description": "The number of the hosts whose instance audit tasks are running.", + }, + "overall_status": { + "type": "string", + "description": ( + "The overall status of instance audit tasks." + "M of N hosts done. K errors." + "The M value is the number of hosts whose instance audit tasks have been done in the period. The N value is the number of all hosts. The K value is the number of hosts whose instance audit tasks cause errors. If instance audit tasks have been done at all hosts in the period, the overall status is as follows:" + "ALL hosts done. K errors." + ), + }, + "period_beginning": { + "type": "string", + "format": "date-time", + "description": "The beginning time of the instance usage audit period. For example, 2016-05-01 00:00:00.", + }, + "period_ending": { + "type": "string", + "format": "date-time", + "description": "The ending time of the instance usage audit period. 
For example, 2016-06-01 00:00:00.", + }, + "total_errors": { + "type": "integer", + "description": "The total number of instance audit task errors.", + }, + "total_instances": { + "type": "integer", + "description": "The total number of VM instances in the period.", + }, + }, +} + + +KEYPAIR_SHORT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Keypair object", + "properties": { + "name": {"type": "string", "description": "The name for the keypair"}, + "public_key": { + "type": "string", + "description": "The keypair public key.", + }, + "fingerprint": { + "type": "string", + "description": "The fingerprint for the keypair.", + }, + "type": { + "type": "string", + "description": "The type of the keypair. Allowed values are ssh or x509.", + "x-openstack": {"min-ver": "2.2"}, + }, + }, +} + +KEYPAIR_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "keypairs": { + "type": "array", + "description": "Array of Keypair objects", + "items": { + "type": "object", + "properties": { + "keypair": copy.deepcopy(KEYPAIR_SHORT_SCHEMA), + }, + }, + }, + "keypairs_links": copy.deepcopy(LINKS_SCHEMA), + }, +} + +KEYPAIR_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Keypair object", + "properties": { + "user_id": { + "type": "string", + "description": "The user_id for a keypair.", + }, + "deleted": { + "type": "boolean", + "description": "A boolean indicates whether this keypair is deleted or not. 
The value is always false (not deleted).", + }, + "created_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was created.", + }, + "deleted_at": { + "type": ["string", "null"], + "format": "date-time", + "description": "It is always null.", + }, + "updated_at": { + "type": ["string", "null"], + "format": "date-time", + "description": "It is always null.", + }, + "id": {"type": "integer", "description": "The keypair ID."}, + **copy.deepcopy(KEYPAIR_SHORT_SCHEMA["properties"]), + }, +} + +KEYPAIR_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Keypair object", + "properties": {"keypair": KEYPAIR_SCHEMA}, +} + +KEYPAIR_CREATED_SCHEMA: dict[str, Any] = copy.deepcopy( + KEYPAIR_CONTAINER_SCHEMA +) +KEYPAIR_CREATED_SCHEMA["properties"]["keypair"]["properties"][ + "private_key" +] = { + "type": "string", + "description": "If you do not provide a public key on create, a new keypair will be built for you, and the private key will be returned during the initial create call. 
Make sure to save this, as there is no way to get this private key again in the future.", + "x-openstack": {"max-ver": "2.91"}, +} + +MIGRATION_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Migration object", + "properties": { + "created_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was created.", + }, + "updated_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + "dest_compute": { + "type": "string", + "description": "The target compute for a migration.", + }, + "dest_host": { + "type": "string", + "description": "The target host for a migration.", + }, + "dest_node": { + "type": "string", + "description": "The target node for a migration.", + }, + "id": { + "type": "integer", + "description": "The ID of the server migration.", + }, + "instance_uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server.", + }, + "new_instance_type_id": { + "type": "integer", + "description": "In resize case, the flavor ID for resizing the server. In the other cases, this parameter is same as the flavor ID of the server when the migration was started.", + }, + "old_instance_type_id": { + "type": "integer", + "description": "The flavor ID of the server when the migration was started.", + }, + "source_compute": { + "type": "string", + "description": "The source compute for a migration.", + }, + "source_node": { + "type": "string", + "description": "The source node for a migration.", + }, + "status": { + "type": "string", + "description": "The current status of the migration.", + }, + "project_id": { + "type": ["string", "null"], + "description": "The ID of the project which initiated the server migration. 
The value may be null for older migration records.", + }, + "user_id": { + "type": ["string", "null"], + "description": "The ID of the user which initiated the server migration. The value may be null for older migration records.", + "x-openstack": {"min-ver": "2.80"}, + }, + "migration_type": { + "type": "string", + "enum": ["live-migration", "migration", "resize", "evacuation"], + "description": "The type of the server migration. This is one of live-migration, migration, resize and evacuation. New in version 2.23", + "x-openstack": {"min-ver": "2.23"}, + }, + "uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the migration.", + "x-openstack": {"min-ver": "2.59"}, + }, + }, +} + +MIGRATION_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "List of migration objects", + "properties": { + "migrations": { + "type": "array", + "items": copy.deepcopy(MIGRATION_SCHEMA), + }, + "migrations_links": { + "x-openstack": {"min-ver": "2.59"}, + **copy.deepcopy(LINKS_SCHEMA), + }, + }, +} + +SERVER_MIGRATION_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Migration object", + "properties": { + "created_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was created.", + }, + "updated_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + "dest_compute": { + "type": "string", + "description": "The target compute for a migration.", + }, + "dest_host": { + "type": "string", + "description": "The target host for a migration.", + }, + "dest_node": { + "type": "string", + "description": "The target node for a migration.", + }, + "id": { + "type": "integer", + "description": "The ID of the server migration.", + }, + "source_compute": { + "type": "string", + "description": "The source compute for a migration.", + }, + "source_node": { + "type": "string", + "description": 
"The source node for a migration.", + }, + "status": { + "type": "string", + "description": "The current status of the migration.", + }, + "project_id": { + "type": ["string", "null"], + "description": "The ID of the project which initiated the server migration. The value may be null for older migration records.", + "x-openstack": {"min-ver": "2.80"}, + }, + "user_id": { + "type": ["string", "null"], + "description": "The ID of the user which initiated the server migration. The value may be null for older migration records.", + "x-openstack": {"min-ver": "2.80"}, + }, + "uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the migration.", + "x-openstack": {"min-ver": "2.59"}, + }, + "disk_processed_bytes": { + "type": "integer", + "description": "The amount of disk, in bytes, that has been processed during the migration.", + }, + "disk_remaining_bytes": { + "type": "integer", + "description": "The amount of disk, in bytes, that still needs to be migrated.", + }, + "disk_total_bytes": { + "type": "integer", + "description": "The total amount of disk, in bytes, that needs to be migrated.", + }, + "memory_processed_bytes": { + "type": "integer", + "description": "The amount of memory, in bytes, that has been processed during the migration.", + }, + "memory_remaining_bytes": { + "type": "integer", + "description": "The amount of memory, in bytes, that still needs to be migrated.", + }, + "memory_total_bytes": { + "type": "integer", + "description": "The total amount of memory, in bytes, that needs to be migrated.", + }, + "server_uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server.", + }, + }, +} +SERVER_MIGRATION_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "List of migration objects", + "properties": { + "migrations": { + "type": "array", + "items": copy.deepcopy(SERVER_MIGRATION_SCHEMA), + }, + }, +} +SERVER_MIGRATION_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + 
"properties": {"migration": copy.deepcopy(SERVER_MIGRATION_SCHEMA)}, +} + +QUOTA_SET_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Quota Set object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the tenant/user the quotas listed for.", + }, + **quota_sets.quota_resources, + }, +} + +QUOTA_SET_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"quota_set": QUOTA_SET_SCHEMA}, +} +QUOTA_DETAIL_SCHEMA: dict[str, Any] = { + "in_use": {"type": "integer"}, + "limit": {"type": "integer"}, + "reserved": {"type": "integer"}, +} +QUOTA_SET_DETAIL_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "quota_set": { + "type": "object", + "description": "A quota_set object.", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the tenant/user the quotas listed for.", + }, + "instances": { + "type": "object", + "description": "The object of detailed servers quota, including in_use, limit and reserved number of instances.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "cores": { + "type": "object", + "description": "The object of detailed key pairs quota, including in_use, limit and reserved number of key pairs.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "ram": { + "type": "object", + "description": "The object of detailed key ram quota, including in_use, limit and reserved number of ram.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "floating_ips": { + "type": "object", + "description": "The object of detailed floating ips quota, including in_use, limit and reserved number of floating ips.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.35"}, + }, + "fixed_ips": { + "type": "object", + "description": "The object of detailed fixed ips quota, including in_use, limit and reserved number of fixed ips.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.35"}, + }, + 
"metadata_items": { + "type": "object", + "description": "The object of detailed key metadata items quota, including in_use, limit and reserved number of metadata items.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "key_pairs": { + "type": "object", + "description": "The object of detailed key pairs quota, including in_use, limit and reserved number of key pairs.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "security_groups": { + "type": "object", + "description": "The object of detailed security groups, including in_use, limit and reserved number of security groups.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.35"}, + }, + "security_group_rules": { + "type": "object", + "description": "The object of detailed security group rules quota, including in_use, limit and reserved number of security group rules.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.35"}, + }, + "injected_files": { + "type": "object", + "description": "The object of detailed injected files quota, including in_use, limit and reserved number of injected files.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.56"}, + }, + "injected_files_content_bytes": { + "type": "object", + "description": "The object of detailed injected file content bytes quota, including in_use, limit and reserved number of injected file content bytes.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.56"}, + }, + "injected_files_path_bytes": { + "type": "object", + "description": "The object of detailed injected file path bytes quota, including in_use, limit and reserved number of injected file path bytes.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.56"}, + }, + "server_groups": { + "type": "object", + "description": "The object of detailed server groups, including in_use, limit and reserved number of server groups.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "server_group_members": { + "type": "object", + 
"description": "The object of detailed server group members, including in_use, limit and reserved number of server group members.", + "properties": QUOTA_DETAIL_SCHEMA, + }, + "networks": { + "type": "object", + "description": "The number of private networks that can be created per project.", + "properties": QUOTA_DETAIL_SCHEMA, + "x-openstack": {"max-ver": "2.35"}, + }, + }, + } + }, +} +# TODO(gtema): class set props are not quota_set props, but for now keep this way +QUOTA_CLASS_SET_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Quota Class Set object", + "properties": { + "id": { + "type": "string", + "description": "The ID of the quota class. Nova supports the default Quota Class only.", + }, + **quota_sets.quota_resources, + }, +} + +QUOTA_CLASS_SET_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"quota_class_set": QUOTA_CLASS_SET_SCHEMA}, +} + +EXTERNAL_EVENTS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "events": { + "description": "List of external events to process.", + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "enum": [ + "network-changed", + "network-vif-plugged", + "network-vif-unplugged", + "network-vif-deleted", + "volume-extended", + "power-update", + "accelerator-request-bound", + ], + "description": "The event name.", + }, + "server_uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server instance to which the API dispatches the event. You must assign this instance to a host. Otherwise, this call does not dispatch the event to the instance.", + }, + "status": { + "type": "string", + "enum": ["failed", "completed", "in-progress"], + "description": "The event status. 
Default is `completed`.", + }, + "tag": { + "type": "string", + "description": "A string value that identifies the event.", + }, + }, + }, + } + }, +} + +SERVER_GROUP_POLICIES = [ + "anti-affinity", + "affinity", + "soft-anti-affinity", + "soft-affinity", +] + +SERVER_GROUP_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server group.", + "readOnly": True, + }, + "members": { + "type": "array", + "items": {"type": "string"}, + "description": "The list of members in the server group", + }, + "metadata": { + "description": "Metadata key and value pairs. The maximum size for each metadata key and value pair is 255 bytes. It’s always empty and only used for keeping compatibility.", + "x-openstack": {"max-ver": "2.63"}, + **parameter_types.metadata, + }, + "name": { + "type": "string", + "description": "A name identifying the server group", + }, + "policies": { + "type": "array", + "items": {"type": "string", "enum": SERVER_GROUP_POLICIES}, + "description": "A list of exactly one policy name to associate with the server group.", + "maxItems": 1, + "x-openstack": {"max-ver": "2.63"}, + }, + "policy": { + "type": "string", + "description": "The policy field represents the name of the policy", + "enum": SERVER_GROUP_POLICIES, + "x-openstack": {"min-ver": "2.64"}, + }, + "project_id": { + "type": "string", + "description": "The project ID who owns the server group.", + "x-openstack": {"min-ver": "2.13"}, + }, + "rules": { + "type": "object", + "description": "The rules field, which is a dict, can be applied to the policy. Currently, only the max_server_per_host rule is supported for the anti-affinity policy. The max_server_per_host rule allows specifying how many members of the anti-affinity group can reside on the same compute host. 
If not specified, only one member from the same anti-affinity group can reside on a given host.", + "properties": { + "max_server_per_host": parameter_types.positive_integer, + }, + "additionalProperties": False, + "x-openstack": {"min-ver": "2.64"}, + }, + "user_id": { + "type": "string", + "description": "The user ID who owns the server group", + "x-openstack": {"min-ver": "2.13"}, + }, + }, +} + +SERVER_GROUP_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "server_groups": { + "description": "The list of existing server groups.", + "type": "array", + "items": SERVER_GROUP_SCHEMA, + } + }, +} +SERVER_GROUP_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"server_group": SERVER_GROUP_SCHEMA}, +} + +SERVICE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "zone": { + "type": "string", + "description": "The availability zone of service", + "x-openstack-sdk-name": "availability_zone", + }, + "binary": {"type": "string", "description": "Binary name of service"}, + "disabled_reason": { + "type": "string", + "description": "Disabled reason of service", + }, + "host": { + "type": "string", + "description": "The name of the host where service runs", + }, + "id": { + "type": ["integer", "string"], + "format": "uuid", + "description": "Id of the resource", + "readOnly": True, + }, + "forced_down": { + "type": "boolean", + "description": "Whether or not this service was forced down manually by an administrator after the service was fenced", + "x-openstack": {"min-ver": "2.11"}, + }, + "name": {"type": "string", "description": "Service name"}, + "state": {"type": "string", "description": "State of service"}, + "status": { + "type": "string", + "description": "Status of service", + "enum": ["disabled", "enabled"], + "readOnly": True, + }, + "updated_at": { + "type": "string", + "description": "The date and time when the resource was updated", + "readOnly": True, + }, + }, +} + +SERVICE_CONTAINER_SCHEMA: 
dict[str, Any] = { + "type": "object", + "properties": {"service": SERVICE_SCHEMA}, +} + +SERVICE_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "services": { + "type": "array", + "items": SERVICE_SCHEMA, + "description": "A list of service objects.", + } + }, +} + +TENANT_USAGE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "tenant_id": { + "type": "string", + "description": "The UUID of the project in a multi-tenancy cloud.", + }, + "start": { + "type": "string", + "format": "date-time", + "description": "The beginning time to calculate usage statistics on compute and storage resources.", + }, + "stop": { + "type": "string", + "format": "date-time", + "description": "The ending time to calculate usage statistics on compute and storage resources.", + }, + "total_hours": { + "type": "number", + "format": "float", + "description": "The total duration that servers exist (in hours).", + }, + "total_local_gb_usage": { + "type": "number", + "format": "float", + "description": "Multiplying the server disk size (in GiB) by hours the server exists, and then adding that all together for each server.", + }, + "total_memory_mb_usage": { + "type": "number", + "format": "float", + "description": "Multiplying the server memory size (in MiB) by hours the server exists, and then adding that all together for each server.", + }, + "total_vcpus_usage": { + "type": "number", + "format": "float", + "description": "Multiplying the number of virtual CPUs of the server by hours the server exists, and then adding that all together for each server.", + }, + "server_usages": { + "type": "array", + "description": "A list of the server usage objects.", + "items": { + "type": "object", + "properties": { + "ended_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the server was deleted.", + }, + "flavor": { + "type": "string", + "description": "The display name of a flavor.", + }, + "hours": { + "type": 
"number", + "format": "float", + "description": "The duration that the server exists (in hours).", + }, + "instance_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server.", + }, + "local_gb": { + "type": "integer", + "description": "The sum of the root disk size of the server and the ephemeral disk size of it (in GiB).", + }, + "memory_mb": { + "type": "integer", + "description": "The memory size of the server (in MiB).", + }, + "name": { + "type": "string", + "description": "The server name.", + }, + "tenant_id": { + "type": "string", + "description": "The UUID of the project in a multi-tenancy cloud.", + }, + "started_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the server was launched.", + }, + "state": { + "type": "string", + "description": "The VM state.", + }, + "uptime": { + "type": "integer", + "description": "The uptime of the server.", + }, + "vcpus": { + "type": "integer", + "description": "The number of virtual CPUs that the server uses.", + }, + }, + }, + }, + }, +} + +TENANT_USAGE_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "tenant_usages": { + "type": "array", + "items": TENANT_USAGE_SCHEMA, + "description": "A list of the tenant usage objects.", + }, + "tenant_usages_links": LINKS_SCHEMA, + }, +} + +SERVER_SHORT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Server object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "Server ID.", + }, + "name": { + "type": "string", + "format": "uuid", + "description": "Server name.", + }, + }, +} +SERVER_ADDRESSES_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "A dictionary of addresses this server can be accessed through. The dictionary contains keys such as ``private`` and ``public``, each containing a list of dictionaries for addresses of that type. 
The addresses are contained in a dictionary with keys ``addr`` and ``version``, which is either 4 or 6 depending on the protocol of the IP address.", + "additionalProperties": { + "type": "array", + "items": { + "type": "object", + "properties": { + "addr": { + "description": "The IP address.", + **parameter_types.ip_address, + }, + "version": { + "type": "integer", + "enum": [4, 6], + "description": "The IP version of the address associated with server.", + }, + }, + }, + }, +} +SERVER_ADDRESSES_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The addresses information for the server.", + "properties": {"addresses": SERVER_ADDRESSES_SCHEMA}, +} + + +SERVER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Server object", + "properties": { + "accessIPv4": { + "description": "IPv4 address that should be used to access this server. May be automatically set by the provider.", + **parameter_types.ipv4, + }, + "accessIPv6": { + "description": "IPv6 address that should be used to access this server. May be automatically set by the provider.", + **parameter_types.ipv6, + }, + "addresses": SERVER_ADDRESSES_SCHEMA, + "os-extended-volumes:volumes_attached": { + "type": "array", + "items": {"type": "object"}, + "description": "A list of an attached volumes. Each item in the list contains at least an 'id' key to identify the specific volumes.", + }, + "OS-EXT-AZ:availability_zone": { + "type": "string", + "description": "The name of the availability zone this server is a part of.", + }, + "OS-EXT-SRV-ATTR:host": { + "type": "string", + "description": "The name of the compute host on which this instance is running. Appears in the response for administrative users only.", + }, + "config_drive": { + "type": "string", + "description": "Indicates whether a configuration drive enables metadata injection. 
Not all cloud providers enable this feature.", + }, + "created": { + "type": "string", + "format": "date-time", + "description": "Timestamp of when the server was created.", + "readOnly": True, + }, + "description": { + "type": "string", + "description": "The description of the server. Before microversion 2.19 this was set to the server name.", + }, + "OS-DCF:diskConfig": { + "type": "string", + "description": "The disk configuration. Either AUTO or MANUAL.", + "enum": ["AUTO", "MANUAL"], + }, + "fault": { + "type": "object", + "description": "A fault object. Only available when the server status is ERROR or DELETED and a fault occurred.", + "properties": { + "code": { + "type": "integer", + "description": "The error response code.", + }, + "created": { + "type": "string", + "format": "date-time", + "description": "The date and time when the exception was raised.", + }, + "message": { + "type": "string", + "description": "The error message.", + }, + "details": { + "type": "string", + "description": "The stack trace. It is available if the response code is not 500 or you have the administrator privilege", + }, + }, + "additionalProperties": False, + }, + "flavor": { + "type": "object", + "description": "The flavor property as returned from server.", + "properties": { + "id": { + "type": "string", + "description": "The ID of the flavor. 
While people often make this look like an int, this is really a string.", + "x-openstack": {"max-ver": "2.46"}, + }, + "links": { + "description": "Links to the flavor resource", + "x-openstack": {"max-ver": "2.46"}, + **LINKS_SCHEMA, + }, + "vcpus": { + "type": "integer", + "description": "The number of virtual CPUs that were allocated to the server.", + "x-openstack": {"min-ver": "2.47"}, + }, + "ram": { + "type": "integer", + "description": "The amount of RAM a flavor has, in MiB.", + "x-openstack": {"min-ver": "2.47"}, + }, + "disk": { + "type": "integer", + "description": "The size of the root disk that was created in GiB.", + "x-openstack": {"min-ver": "2.47"}, + }, + "ephemeral": { + "type": "integer", + "description": "The size of the ephemeral disk that was created, in GiB.", + "x-openstack": {"min-ver": "2.47"}, + }, + "swap": { + "type": "integer", + "description": "The size of a dedicated swap disk that was allocated, in MiB.", + "x-openstack": {"min-ver": "2.47"}, + }, + "original_name": { + "type": "string", + "description": "The display name of a flavor.", + "x-openstack": {"min-ver": "2.47"}, + }, + "extra_specs": { + "description": "A dictionary of the flavor’s extra-specs key-and-value pairs. This will only be included if the user is allowed by policy to index flavor extra_specs.", + "x-openstack": {"min-ver": "2.47"}, + **parameter_types.metadata, + }, + }, + }, + "hostId": { + "type": "string", + "description": "An ID representing the host of this server.", + }, + "host_status": { + "type": ["string", "null"], + "description": "The host status.", + "enum": ["UP", "DOWN", "MAINTENANCE", "UNKNOWN", "", "null"], + "x-openstack": {"min-ver": "2.16"}, + }, + "OS-EXT-SRV-ATTR:hostname": { + "type": "string", + "description": "The hostname set on the instance when it is booted. 
By default, it appears in the response for administrative users only.", + "x-openstack": {"min-ver": "2.3"}, + }, + "OS-EXT-SRV-ATTR:hypervisor_hostname": { + "type": "string", + "description": "The hypervisor host name. Appears in the response for administrative users only.", + "x-openstack-sdk-name": "hypervisor_hostname", + }, + "id": { + "type": "string", + "format": "uuid", + "description": "Id of the server", + "readOnly": True, + }, + "image": { + "type": "object", + "description": "The image property as returned from server.", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The image ID", + }, + "links": {"description": "Image links", **LINKS_SCHEMA}, + }, + }, + "OS-EXT-SRV-ATTR:instance_name": { + "type": "string", + "description": "The instance name. The Compute API generates the instance name from the instance name template. Appears in the response for administrative users only.", + }, + "locked": { + "type": "boolean", + "description": "True if the instance is locked otherwise False.", + "x-openstack": {"min-ver": "2.9"}, + }, + "OS-EXT-SRV-ATTR:kernel_id": { + "type": "string", + "description": "The UUID of the kernel image when using an AMI. Will be null if not. By default, it appears in the response for administrative users only.", + "x-openstack": {"min-ver": "2.3"}, + }, + "key_name": { + "type": "string", + "description": "The name of an associated keypair", + }, + "OS-EXT-SRV-ATTR:launch_index": { + "type": "integer", + "description": "When servers are launched via multiple create, this is the sequence in which the servers were launched. 
By default, it appears in the response for administrative users only.", + "x-openstack": {"min-ver": "2.3"}, + }, + "OS-SRV-USG:launched_at": { + "type": "string", + "description": "The timestamp when the server was launched.", + }, + "links": { + "description": "A list of dictionaries holding links relevant to this server.", + **LINKS_SCHEMA, + }, + "metadata": { + "type": "object", + "description": "A dictionary of metadata key-and-value pairs, which is maintained for backward compatibility.", + **parameter_types.metadata, + }, + "name": {"type": "string", "description": "Name"}, + "OS-EXT-STS:power_state": { + "type": "integer", + "description": ( + "The power state of this server. This is an enum value that is mapped as:\n" + " - 0: NOSTATE\n" + " - 1: RUNNING\n" + " - 3: PAUSED\n" + " - 4: SHUTDOWN\n" + " - 6: CRASHED\n" + " - 7: SUSPENDED\n" + ), + }, + "progress": { + "type": "integer", + "description": "While the server is building, this value represents the percentage of completion. Once it is completed, it will be 100.", + }, + "tenant_id": { + "type": "string", + "format": "uuid", + "description": "The ID of the project this server is associated with.", + }, + "OS-EXT-SRV-ATTR:ramdisk_id": { + "type": "string", + "description": "The UUID of the ramdisk image when using an AMI. Will be null if not. By default, it appears in the response for administrative users only.", + "x-openstack": {"min-ver": "2.3"}, + }, + "OS-EXT-SRV-ATTR:reservation_id": { + "type": "string", + "description": "The reservation id for the server. This is an id that can be useful in tracking groups of servers created with multiple create, that will all have the same reservation_id. 
By default, it appears in the response for administrative users only.", + "x-openstack": {"min-ver": "2.3"}, + }, + "OS-EXT-SRV-ATTR:root_device_name": { + "type": "string", + "description": "The root device name for the instance By default, it appears in the response for administrative users only.", + "x-openstack": {"min-ver": "2.3"}, + }, + "security_groups": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The security group name", + } + }, + }, + "description": "One or more security groups objects.", + }, + "server_groups": { + "type": "array", + "items": {"type": "string"}, + "description": "The UUIDs of the server groups to which the server belongs. Currently this can contain at most one entry.", + "x-openstack": {"min-ver": "2.71"}, + }, + "status": { + "type": "string", + "description": "The state this server is in. Valid values include ``ACTIVE``, ``BUILDING``, ``DELETED``, ``ERROR``, ``HARD_REBOOT``, ``PASSWORD``, ``PAUSED``, ``REBOOT``, ``REBUILD``, ``RESCUED``, ``RESIZED``, ``REVERT_RESIZE``, ``SHUTOFF``, ``SOFT_DELETED``, ``STOPPED``, ``SUSPENDED``, ``UNKNOWN``, or ``VERIFY_RESIZE``.", + "readOnly": True, + }, + "tags": { + "type": "array", + "items": {"type": "string"}, + "description": "A list of tags. The maximum count of tags in this list is 50.", + "x-openstack": {"min-ver": "2.26"}, + }, + "OS-EXT-STS:task_state": { + "type": "string", + "description": "The task state of this server.", + }, + "OS-SRV-USG:terminated_at": { + "type": "string", + "description": "The timestamp when the server was terminated (if it has been).", + }, + "trusted_image_certificates": { + "type": ["array", "null"], + "items": {"type": "string"}, + "description": "A list of trusted certificate IDs, that were used during image signature verification to verify the signing certificate. The list is restricted to a maximum of 50 IDs. 
The value is null if trusted certificate IDs are not set.", + "x-openstack": {"min-ver": "2.63"}, + }, + "updated": { + "type": "string", + "format": "date-time", + "description": "Timestamp of when this server was last updated.", + "readOnly": True, + }, + "OS-EXT-SRV-ATTR:user_data": { + "type": "string", + "description": "Configuration information or scripts to use upon launch. Must be Base64 encoded.", + "x-openstack": {"min-ver": "2.3"}, + }, + "user_id": { + "type": "string", + "description": "The ID of the owners of this server.", + }, + "OS-EXT-STS:vm_state": { + "type": "string", + "description": "The VM state of this server.", + }, + }, +} + +SERVER_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"server": copy.deepcopy(SERVER_SCHEMA)}, +} + +SERVER_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "servers": { + "type": "array", + "items": copy.deepcopy(SERVER_SHORT_SCHEMA), + }, + "servers_links": LINKS_SCHEMA, + }, +} + +SERVER_LIST_DETAIL_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "servers": {"type": "array", "items": copy.deepcopy(SERVER_SCHEMA)}, + "servers_links": LINKS_SCHEMA, + }, +} +SERVER_CREATED_SCHEMA: dict[str, Any] = { + "oneOf": [ + { + "type": "object", + "description": "Created server object", + "properties": { + "server": { + "type": "object", + "properties": { + "OS-DCF:diskConfig": { + "type": "string", + "description": "The disk configuration. Either AUTO or MANUAL.", + "enum": ["AUTO", "MANUAL"], + }, + "adminPass": { + "type": "string", + "format": "password", + "description": "The administrative password for the server. 
If you set enable_instance_password configuration option to False, the API wouldn’t return the adminPass field in response.", + }, + "id": { + "type": "string", + "format": "uuid", + "description": "Id of the server", + "readOnly": True, + }, + "security_groups": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The security group name", + } + }, + }, + "description": "One or more security groups objects.", + }, + "links": LINKS_SCHEMA, + }, + } + }, + }, + { + "type": "object", + "properties": { + "reservation_id": { + "type": "string", + "description": "The reservation id for the server. This is an id that can be useful in tracking groups of servers created with multiple create, that will all have the same reservation_id.", + } + }, + }, + ] +} + +SERVER_ACTION_CREATE_IMAGE_RESPONSE_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "image_id": { + "type": "string", + "description": "The UUID for the resulting image snapshot.", + "x-openstack": {"min-ver": "2.45"}, + } + }, +} +SERVER_ACTION_NEW_ADMINPASS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "adminPass": { + "type": "string", + "description": "An administrative password to access moved instance. If you set enable_instance_password configuration option to False, the API wouldn’t return the adminPass field in response.", + } + }, +} +SERVER_ACTION_GET_CONSOLE_OUTPUT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The console output as a string. 
Control characters will be escaped to create a valid JSON string.", + "properties": {"output": {"type": "string"}}, +} +SERVER_ACTION_REMOTE_CONSOLE_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The remote console object.", + "properties": { + "console": { + "type": "object", + "properties": { + "type": { + "type": "string", + "description": "The type of the remote console", + "enum": ["rdp-html5", "serial", "spice-html5", "novnc"], + }, + "url": { + "type": "string", + "description": "The URL used to connect to the console.", + }, + }, + } + }, +} +SERVER_DIAGNOSTICS_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "cpu_details": { + "type": "array", + "items": {"type": "object"}, + "description": "The list of dictionaries with detailed information about VM CPUs.", + "x-openstack": {"min-ver": "2.48"}, + }, + "disk_details": { + "type": "array", + "items": {"type": "object"}, + "description": "The list of dictionaries with detailed information about VM disks.", + "x-openstack": {"min-ver": "2.48"}, + }, + "driver": { + "type": "string", + "description": "The driver on which the VM is running.", + "enum": ["libvirt", "xenapi", "hyperv", "vmwareapi", "ironic"], + "x-openstack": {"min-ver": "2.48"}, + }, + "config_drive": { + "type": "boolean", + "description": "Indicates whether or not a config drive was used for this server.", + "x-openstack": {"min-ver": "2.48"}, + }, + "hypervisor": { + "type": "string", + "description": "The hypervisor on which the VM is running.", + "x-openstack": {"min-ver": "2.48"}, + }, + "hypervisor_os": { + "type": "string", + "description": "The hypervisor OS.", + "x-openstack": {"min-ver": "2.48"}, + }, + "id": { + "type": "string", + "format": "uuid", + "description": "Id of the resource", + "readOnly": True, + }, + "memory_details": { + "type": "array", + "items": {"type": "object"}, + "description": "The dictionary with information about VM memory usage.", + "x-openstack": {"min-ver": "2.48"}, + }, 
+ "name": {"type": "string", "description": "Name"}, + "nic_details": { + "type": "array", + "items": { + "type": "object", + "properties": { + "mac_address": {"type": "string", "description": ""}, + "rx_octets": {"type": "integer", "description": ""}, + "rx_errors": {"type": "integer", "description": ""}, + "rx_drop": {"type": "integer", "description": ""}, + "rx_packets": {"type": "integer", "description": ""}, + "rx_rate": {"type": "integer", "description": ""}, + "tx_octets": {"type": "integer", "description": ""}, + "tx_errors": {"type": "integer", "description": ""}, + "tx_drop": {"type": "integer", "description": ""}, + "tx_packets": {"type": "integer", "description": ""}, + "tx_rate": {"type": "integer", "description": ""}, + }, + }, + "description": "The list of dictionaries with detailed information about VM NICs.", + "x-openstack": {"min-ver": "2.48"}, + }, + "num_cpus": { + "type": "integer", + "description": "The number of vCPUs.", + "x-openstack": {"min-ver": "2.48"}, + }, + "num_disks": { + "type": "integer", + "description": "The number of disks.", + "x-openstack": {"min-ver": "2.48"}, + }, + "num_nics": { + "type": "integer", + "description": "The number of vNICs.", + "x-openstack": {"min-ver": "2.48"}, + }, + "state": { + "type": "string", + "description": "The current state of the VM.", + "enum": [ + "pending", + "running", + "paused", + "shutdown", + "crashed", + "suspended", + ], + "x-openstack": {"min-ver": "2.48"}, + }, + "uptime": { + "type": "integer", + "description": "The amount of time in seconds that the VM has been running.", + "x-openstack": {"min-ver": "2.48"}, + }, + }, +} + +SERVER_METADATA_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Metadata key and value pairs. The maximum size for each metadata key and value pair is 255 bytes.", + "properties": {"metadata": parameter_types.metadata}, +} +SERVER_METADATA_ITEM_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "Metadata key and value pairs. 
The maximum size for each metadata key and value pair is 255 bytes.", + "properties": {"meta": {"maxProperties": 1, **parameter_types.metadata}}, +} + +SERVER_INSTANCE_ACTION_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The instance action object.", + "properties": { + "action": {"type": "string", "description": "The name of the action."}, + "events": { + "type": "array", + "items": { + "type": "object", + "properties": { + "event": { + "type": "string", + "description": "The name of the event.", + }, + "start_time": { + "type": "string", + "format": "date-time", + "description": "The date and time when the event was started.", + }, + "finish_time": { + "type": "string", + "format": "date-time", + "description": "The date and time when the event was finished.", + }, + "result": { + "type": "string", + "description": "The result of the event.", + }, + "traceback": { + "type": ["string", "null"], + "description": "he traceback stack if an error occurred in this event. Policy defaults enable only users with the administrative role to see an instance action event traceback. Cloud providers can change these permissions through the policy.json file.", + }, + "hostId": { + "type": "string", + "description": "An obfuscated hashed host ID string, or the empty string if there is no host for the event. This is a hashed value so will not actually look like a hostname, and is hashed with data from the project_id, so the same physical host as seen by two different project_ids will be different. This is useful when within the same project you need to determine if two events occurred on the same or different physical hosts.", + "x-openstack": {"min-ver": "2.62"}, + }, + "host": { + "type": "string", + "description": "The name of the host on which the event occurred. Policy defaults enable only users with the administrative role to see an instance action event host. 
Cloud providers can change these permissions through the policy.json file.", + "x-openstack": {"min-ver": "2.62"}, + }, + "details": { + "type": ["string", "null"], + "description": "Details of the event. May be null.", + "x-openstack": {"min-ver": "2.84"}, + }, + }, + }, + "description": "Events", + }, + "message": { + "type": ["string", "null"], + "description": "The related error message for when an action fails.", + }, + "project_id": { + "type": "string", + "description": "The ID of the project that this server belongs to.", + }, + "request_id": { + "type": "string", + "description": "The ID of the request that this action related to.", + }, + "start_time": { + "type": "string", + "format": "date-time", + "description": "The date and time when the action was started.", + }, + "user_id": { + "type": "string", + "description": "The ID of the user which initiated the server action.", + }, + "updated_at": { + "type": "string", + "format": "date-time", + "description": "The date and time when the instance action or the action event of instance action was updated.", + "x-openstack": {"min-ver": "2.58"}, + }, + }, +} +SERVER_INSTANCE_ACTION_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"instanceAction": SERVER_INSTANCE_ACTION_SCHEMA}, +} +SERVER_INSTANCE_ACTION_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "instanceActions": { + "type": "array", + "items": SERVER_INSTANCE_ACTION_SCHEMA, + "description": "List of the actions for the given instance in descending order of creation.", + }, + "links": LINKS_SCHEMA, + }, +} + +INTERFACE_ATTACHMENT_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "The interface attachment.", + "properties": { + "fixed_ips": { + "type": "array", + "description": "Fixed IP addresses with subnet IDs.", + "items": { + "type": "object", + "properties": { + "ip_address": { + "description": "The IP address.", + **parameter_types.ip_address, + }, + "subnet_id": { + "type": 
"string", + "format": "uuid", + "description": "The UUID of the subnet.", + }, + }, + }, + }, + "mac_addr": { + "description": "The MAC address", + **parameter_types.mac_address, + }, + "net_id": { + "type": "string", + "format": "uuid", + "description": "The network ID.", + }, + "port_id": { + "type": "string", + "format": "uuid", + "description": "The port ID.", + }, + "port_state": {"type": "string", "description": "The port state."}, + "tag": { + "type": ["string", "null"], + "description": "The device tag applied to the virtual network interface or null.", + "x-openstack": {"min-ver": "2.70"}, + }, + }, +} +INTERFACE_ATTACHMENT_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"interfaceAttachment": INTERFACE_ATTACHMENT_SCHEMA}, +} +INTERFACE_ATTACHMENT_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "interfaceAttachments": { + "type": "array", + "items": INTERFACE_ATTACHMENT_SCHEMA, + } + }, +} + +SERVER_PASSWORD_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "password": { + "type": "string", + "format": "password", + "description": "The password returned from metadata server.", + } + }, +} +SERVER_SECURITY_GROUPS_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "security_groups": { + "type": "array", + "items": { + "type": "object", + "properties": { + "id": { + "type": "string", + "format": "uuid", + "description": "The ID of the security group.", + }, + "name": { + "type": "string", + "description": "The security group name.", + }, + "description": { + "type": "string", + "description": "Security group description.", + }, + "tenant_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the tenant in a multi-tenancy cloud.", + }, + "rules": { + "type": "array", + "description": "The list of security group rules.", + "items": { + "type": "object", + "properties": { + "id": {"type": "string", "format": "uuid"}, + "from_port": {"type": "integer"}, + 
"to_port": {"type": "integer"}, + "ip_protocol": {"type": "string"}, + "ip_range": {"type": "object"}, + "group": { + "type": "object", + "properties": {"name": {"type": "string"}}, + }, + "parent_group_id": { + "type": "string", + "format": "uuid", + }, + }, + }, + }, + }, + }, + }, + }, +} + + +VOLUME_ATTACHMENT_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "device": { + "type": "string", + "description": "Name of the device in the attachment object, such as, /dev/vdb.", + }, + "id": { + "description": "The volume ID of the attachment", + "type": "string", + "format": "uuid", + "x-openstack": {"max-ver": "2.88"}, + }, + "serverId": { + "type": "string", + "format": "uuid", + "description": "The UUID of the server.", + }, + "volumeId": { + "type": "string", + "format": "uuid", + "description": "The UUID of the attached volume.", + }, + "tag": { + "type": ["string", "null"], + "description": "The device tag applied to the volume block device or null.", + "x-openstack": {"min-ver": "2.70"}, + }, + "delete_on_termination": { + "type": "boolean", + "description": "A flag indicating if the attached volume will be deleted when the server is deleted.", + "x-openstack": {"min-ver": "2.79"}, + }, + "attachment_id": { + "type": "string", + "format": "uuid", + "description": "The UUID of the associated volume attachment in Cinder.", + "x-openstack": {"min-ver": "2.89"}, + }, + "bdm_uuid": { + "type": "string", + "format": "uuid", + "description": "The UUID of the block device mapping record in Nova for the attachment.", + "x-openstack": {"min-ver": "2.89"}, + }, + }, +} +VOLUME_ATTACHMENT_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": {"volumeAttachment": VOLUME_ATTACHMENT_SCHEMA}, +} + +VOLUME_ATTACHMENT_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "properties": { + "volumeAttachments": { + "type": "array", + "items": VOLUME_ATTACHMENT_SCHEMA, + } + }, +} + +EXTENSION_SCHEMA: dict[str, Any] = { + "type": "object", + 
"description": "An extension object.", + "properties": { + "alias": { + "type": "string", + "description": "A short name by which this extension is also known.", + }, + "description": { + "type": "string", + "description": "Text describing this extension’s purpose.", + }, + "links": copy.deepcopy(LINKS_SCHEMA), + "name": {"type": "string", "description": "Name of the extension."}, + "namespace": { + "type": "string", + "description": "A URL pointing to the namespace for this extension.", + }, + "updated": { + "type": "string", + "format": "date-time", + "description": "The date and time when the resource was updated.", + }, + }, +} + +EXTENSION_CONTAINER_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "An extension object.", + "properties": {"extension": copy.deepcopy(EXTENSION_SCHEMA)}, +} + +EXTENSION_LIST_SCHEMA: dict[str, Any] = { + "type": "object", + "description": "An extension object.", + "properties": { + "extensions": { + "type": "array", + "items": copy.deepcopy(EXTENSION_SCHEMA), + } + }, +} diff --git a/codegenerator/openapi/octavia.py b/codegenerator/openapi/octavia.py new file mode 100644 index 0000000..596fda6 --- /dev/null +++ b/codegenerator/openapi/octavia.py @@ -0,0 +1,392 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
import inspect
from multiprocessing import Process
from pathlib import Path
from unittest import mock

import fixtures

from codegenerator.common.schema import SpecSchema
from codegenerator.openapi.base import OpenStackServerSourceBase
from codegenerator.openapi.utils import merge_api_ref_doc

from ruamel.yaml.scalarstring import LiteralScalarString


class OctaviaGenerator(OpenStackServerSourceBase):
    """Generate an OpenAPI 3.1 spec for the Octavia (Load Balancing) v2 API.

    Octavia's WSGI application is built with Pecan, which maps URLs to
    controllers dynamically, so the generator instantiates the real root
    controller (with RPC transport faked out) and walks the controller tree
    to reconstruct the routing table before emitting the spec.
    """

    # Map of URL prefixes to the OpenAPI tag their operations belong to.
    URL_TAG_MAP = {
        "/lbaas/listeners": "listeners",
        "/lbaas/loadbalancers": "load-balancers",
        "/lbaas/pools/{pool_id}/members": "members",
        "/lbaas/pools": "pools",
        "/lbaas/healthmonitors": "healthmonitors",
        "/lbaas/l7policies/{l7policy_id}/rules": "l7-rules",
        "/lbaas/l7policies": "l7-policies",
        "/lbaas/quotas": "quotas",
        "/lbaas/providers": "providers",
        "/lbaas/flavorprofiles": "flavor-profiles",
        "/lbaas/flavors": "flavors",
        "/lbaas/availabilityzoneprofiles": "availability-zone-profiles",
        "/lbaas/availabilityzones": "availability-zones",
        "/lbaas/amphorae": "amphorae",
        "/octavia/amphorae": "amphorae",
    }

    def __init__(self):
        # NOTE(review): the base class __init__ is intentionally not
        # invoked here (matching the original) — confirm
        # OpenStackServerSourceBase needs no initialization of its own.
        self.api_version = "2.27"
        self.min_api_version = "2.0"

    def _fake_create_transport(self, url):
        """Replacement for octavia rpc.create_transport that caches fake buses."""
        import oslo_messaging as messaging
        from oslo_config import cfg

        if url not in self._buses:
            self._buses[url] = messaging.get_rpc_transport(cfg.CONF, url=url)
        return self._buses[url]

    def _api_ver_major(self, ver):
        return ver.ver_major

    def _api_ver_minor(self, ver):
        return ver.ver_minor

    def _api_ver(self, ver):
        return (ver.ver_major, ver.ver_minor)

    def _build_routes(self, mapper, node, path=""):
        """Recursively register routes for a Pecan controller tree.

        :param mapper: routes.Mapper collecting the discovered routes
        :param node: current Pecan controller node
        :param path: URL prefix accumulated while descending the tree
        """
        for part in [x for x in dir(node) if callable(getattr(node, x))]:
            # Iterate over functions to find what is exposed on the current
            # level
            obj = getattr(node, part)
            _pecan = getattr(obj, "_pecan", None)
            exposed = getattr(obj, "exposed", None)
            if _pecan and exposed:
                # Only whatever is pecan exposed is of interest
                conditions = {}
                action = None
                url = path
                resource = None
                parent = url.split("/")[-1]
                # Construct singular resource name from the (plural) last
                # path segment, e.g. "policies" -> "policy", "pools" -> "pool"
                if parent.endswith("ies"):
                    resource = parent[:-3] + "y"
                else:
                    resource = parent[:-1]
                if path.startswith("/v2/lbaas/quotas"):
                    # Hack path parameter name for quotas
                    resource = "project"
                # Identify the action from the function name
                # https://pecan.readthedocs.io/en/latest/rest.html#url-mapping
                if part == "get_one":
                    conditions["method"] = ["GET"]
                    action = "show"
                    url += f"/{{{resource}_id}}"
                elif part == "get_all":
                    conditions["method"] = ["GET"]
                    action = "list"
                elif part == "get":
                    conditions["method"] = ["GET"]
                    action = "get"
                    # "Get" is tricky, it can be normal and root, so need
                    # to inspect params: a required "*id*" parameter means
                    # a member (not collection) URL.
                    sig = inspect.signature(obj)
                    for pname, pval in sig.parameters.items():
                        if "id" in pname and pval.default == pval.empty:
                            url += f"/{{{resource}_id}}"
                elif part == "post":
                    conditions["method"] = ["POST"]
                    action = "create"
                elif part == "put":
                    conditions["method"] = ["PUT"]
                    action = "update"
                    url += f"/{{{resource}_id}}"
                elif part == "delete":
                    conditions["method"] = ["DELETE"]
                    action = "delete"
                    url += f"/{{{resource}_id}}"

                if action:
                    # If we identified the method as "interesting" register
                    # it into the routes mapper
                    mapper.connect(
                        None,
                        url,
                        controller=obj,
                        action=action,
                        conditions=conditions,
                    )
        if not hasattr(node, "__dict__"):
            return
        for subcontroller, v in node.__dict__.items():
            # Iterate over node attributes for subcontrollers
            if subcontroller in [
                "repositories",
                "cert_manager",
                "__wrapped__",
            ]:
                # Not interested in those
                continue
            subpath = f"{path}/{subcontroller}"
            self._build_routes(mapper, v, subpath)

        return

    def generate(self, target_dir, args):
        """Run spec generation in a subprocess and return the spec path.

        A separate process isolates the heavy Octavia/oslo imports and
        global state from the caller.
        """
        proc = Process(target=self._generate, args=[target_dir, args])
        proc.start()
        proc.join()
        if proc.exitcode != 0:
            # Fixed typo in the error message ("schma" -> "schema")
            raise RuntimeError("Error generating Octavia OpenAPI schema")
        return Path(target_dir, "openapi_specs", "load-balancing", "v2.yaml")

    def _generate(self, target_dir, args):
        from octavia.api import root_controller
        from octavia.common import config, rpc
        from octavia.api.v2.controllers import amphora
        from octavia.api.v2.controllers import l7rule
        from octavia.api.v2.controllers import listener
        from octavia.api.v2.controllers import load_balancer
        from octavia.api.v2.controllers import member
        from octavia.api.v2.controllers import provider
        from oslo_config import cfg

        from oslo_messaging import conffixture as messaging_conffixture
        from pecan import make_app as pecan_make_app
        from routes import Mapper

        work_dir = Path(target_dir)
        work_dir.mkdir(parents=True, exist_ok=True)

        impl_path = Path(
            work_dir, "openapi_specs", "load-balancing", "v2.yaml"
        )
        impl_path.parent.mkdir(parents=True, exist_ok=True)
        openapi_spec = self.load_openapi(Path(impl_path))
        if not openapi_spec:
            openapi_spec = SpecSchema(
                info=dict(
                    title="OpenStack Load Balancing API",
                    description=LiteralScalarString(
                        "Load Balancing API provided by Octavia service"
                    ),
                    version=self.api_version,
                ),
                openapi="3.1.0",
                security=[{"ApiKeyAuth": []}],
                components=dict(
                    securitySchemes={
                        "ApiKeyAuth": {
                            "type": "apiKey",
                            "in": "header",
                            "name": "X-Auth-Token",
                        }
                    },
                ),
            )
        config.register_cli_opts()

        self._buses = {}

        # Fake out the RPC layer so the API application can be instantiated
        # without a running message bus.
        self.messaging_conf = messaging_conffixture.ConfFixture(cfg.CONF)
        self.messaging_conf.transport_url = "fake:/"
        self.useFixture(self.messaging_conf)
        self.useFixture(
            fixtures.MonkeyPatch(
                "octavia.common.rpc.create_transport",
                self._fake_create_transport,
            )
        )
        with mock.patch("octavia.common.rpc.get_transport_url") as mock_gtu:
            mock_gtu.return_value = None
            rpc.init()

        self.app = pecan_make_app(root_controller.RootController())
        self.root = self.app.application.root

        mapper = Mapper()

        self._build_routes(mapper, self.root)
        # Additional amphora routes
        mapper.connect(
            None,
            "/v2/octavia/amphorae/{amphora_id}/stats",
            controller=amphora.AmphoraStatsController.get,
            action="stats",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/octavia/amphorae/{amphora_id}/config",
            controller=amphora.AmphoraUpdateController.put,
            action="config",
            conditions={"method": ["PUT"]},
        )
        mapper.connect(
            None,
            "/v2/octavia/amphorae/{amphora_id}/failover",
            controller=amphora.FailoverController.put,
            action="failover",
            conditions={"method": ["PUT"]},
        )
        # Provider capability routes (flavor / availability-zone)
        mapper.connect(
            None,
            "/v2/lbaas/providers/{provider}/flavor_capabilities",
            controller=provider.FlavorCapabilitiesController.get_all,
            action="flavor_capabilities",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/providers/{provider}/availability_zone_capabilities",
            controller=provider.AvailabilityZoneCapabilitiesController.get_all,
            action="az_capabilities",
            conditions={"method": ["GET"]},
        )
        # L7Rules routes
        mapper.connect(
            None,
            "/v2/lbaas/l7policies/{l7policy_id}/rules",
            controller=l7rule.L7RuleController.get_all,
            action="index",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/l7policies/{l7policy_id}/rules",
            controller=l7rule.L7RuleController.post,
            action="create",
            conditions={"method": ["POST"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/l7policies/{l7policy_id}/rules/{rule_id}",
            controller=l7rule.L7RuleController.get,
            # Fixed: GET of a single rule is "show", not "create"
            action="show",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/l7policies/{l7policy_id}/rules/{rule_id}",
            controller=l7rule.L7RuleController.put,
            action="update",
            conditions={"method": ["PUT"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/l7policies/{l7policy_id}/rules/{rule_id}",
            controller=l7rule.L7RuleController.delete,
            action="delete",
            conditions={"method": ["DELETE"]},
        )
        # Pool Member routes
        mapper.connect(
            None,
            "/v2/lbaas/pools/{pool_id}/members",
            controller=member.MemberController.get_all,
            action="index",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/pools/{pool_id}/members",
            controller=member.MemberController.post,
            action="create",
            conditions={"method": ["POST"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/pools/{pool_id}/members",
            controller=member.MembersController.put,
            # Fixed: PUT on the collection is the batch member update
            action="update",
            conditions={"method": ["PUT"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/pools/{pool_id}/members/{member_id}",
            controller=member.MemberController.get,
            # Fixed: GET of a single member is "show", not "create"
            action="show",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/pools/{pool_id}/members/{member_id}",
            controller=member.MemberController.put,
            action="update",
            conditions={"method": ["PUT"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/pools/{pool_id}/members/{member_id}",
            controller=member.MemberController.delete,
            action="delete",
            conditions={"method": ["DELETE"]},
        )
        # Listener stat
        mapper.connect(
            None,
            "/v2/lbaas/listeners/{listener_id}/stats",
            controller=listener.StatisticsController.get,
            action="stats",
            conditions={"method": ["GET"]},
        )
        # Loadbalancer OPs stat
        mapper.connect(
            None,
            "/v2/lbaas/loadbalancers/{loadbalancer_id}/stats",
            controller=load_balancer.StatisticsController.get,
            action="stats",
            conditions={"method": ["GET"]},
        )
        # NOTE(review): both "/status" and "/statuses" are mapped to the
        # same controller on purpose; the documented Octavia URL is
        # "/status" — confirm the alias is intended.
        mapper.connect(
            None,
            "/v2/lbaas/loadbalancers/{loadbalancer_id}/status",
            controller=load_balancer.StatusController.get,
            action="status",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/loadbalancers/{loadbalancer_id}/statuses",
            controller=load_balancer.StatusController.get,
            action="status",
            conditions={"method": ["GET"]},
        )
        mapper.connect(
            None,
            "/v2/lbaas/loadbalancers/{loadbalancer_id}/failover",
            controller=load_balancer.FailoverController.put,
            action="failover",
            conditions={"method": ["PUT"]},
        )

        for route in mapper.matchlist:
            # Only generate docs for "/v2/lbaas" and "/v2/octavia"
            if not (
                route.routepath.startswith("/v2/lbaas")
                or route.routepath.startswith("/v2/octavia")
            ):
                continue
            self._process_route(route, openapi_spec, framework="pecan")

        if args.api_ref_src:
            merge_api_ref_doc(
                openapi_spec, args.api_ref_src, allow_strip_version=False
            )

        self.dump_openapi(openapi_spec, Path(impl_path), args.validate)

        return impl_path
from multiprocessing import Process
from pathlib import Path

from ruamel.yaml.scalarstring import LiteralScalarString

from codegenerator.common.schema import (
    SpecSchema,
)
from codegenerator.openapi.base import OpenStackServerSourceBase
from codegenerator.openapi.utils import merge_api_ref_doc


class PlacementGenerator(OpenStackServerSourceBase):
    """Generate an OpenAPI 3.1 spec for the Placement API.

    Placement exposes its routing table on the handler object directly,
    so the generator instantiates a PlacementHandler and processes its
    route map.
    """

    # Map of URL prefixes to the OpenAPI tag their operations belong to.
    URL_TAG_MAP = {
        "/versions": "version",
    }

    def _api_ver_major(self, ver):
        return ver.ver_major

    def _api_ver_minor(self, ver):
        return ver.ver_minor

    def _api_ver(self, ver):
        return (ver.ver_major, ver.ver_minor)

    def _generate(self, target_dir, args):
        from oslo_config import cfg
        from oslo_config import fixture as config_fixture

        from placement import microversion
        from placement import handler
        from placement import conf

        # Supported microversion range comes straight from the service.
        self.api_version = microversion.max_version_string()
        self.min_api_version = microversion.min_version_string()

        config = cfg.ConfigOpts()
        conf_fixture = self.useFixture(config_fixture.Config(config))
        conf.register_opts(conf_fixture.conf)
        # Renamed the local from "handler" so it no longer shadows the
        # imported placement.handler module.
        placement_handler = handler.PlacementHandler(config=conf_fixture.conf)

        self.router = placement_handler._map

        work_dir = Path(target_dir)
        work_dir.mkdir(parents=True, exist_ok=True)

        impl_path = Path(work_dir, "openapi_specs", "placement", "v1.yaml")
        impl_path.parent.mkdir(parents=True, exist_ok=True)

        openapi_spec = self.load_openapi(impl_path)
        if not openapi_spec:
            openapi_spec = SpecSchema(
                info=dict(
                    title="OpenStack Placement API",
                    description=LiteralScalarString(
                        "Placement API provided by Placement service"
                    ),
                    version=self.api_version,
                ),
                openapi="3.1.0",
                security=[{"ApiKeyAuth": []}],
                components=dict(
                    securitySchemes={
                        "ApiKeyAuth": {
                            "type": "apiKey",
                            "in": "header",
                            "name": "X-Auth-Token",
                        }
                    },
                ),
            )

        for route in self.router.matchlist:
            self._process_route(route, openapi_spec)

        self._sanitize_param_ver_info(openapi_spec, self.min_api_version)

        if args.api_ref_src:
            merge_api_ref_doc(
                openapi_spec,
                args.api_ref_src,
                allow_strip_version=False,
            )

        self.dump_openapi(openapi_spec, impl_path, args.validate)

        return impl_path

    def generate(self, target_dir, args):
        """Run spec generation in a subprocess and return the spec path.

        A separate process isolates the Placement/oslo imports and global
        state from the caller.
        """
        proc = Process(target=self._generate, args=[target_dir, args])
        proc.start()
        proc.join()
        if proc.exitcode != 0:
            raise RuntimeError("Error generating Placement OpenAPI schema")
        # Fixed: _generate() writes .../placement/v1.yaml; the previous
        # return value pointed at a non-existent v2.yaml.
        return Path(target_dir, "openapi_specs", "placement", "v1.yaml")
+ processed_operations: set[str] = set() + with open(api_ref_src, "r") as fp: + html_doc = fp.read() + + # openapi_spec = jsonref.replace_refs(openapi_spec) + + soup = BeautifulSoup(html_doc, "html.parser") + docs_title = soup.find("div", class_="docs-title") + title = None + if docs_title: + title = docs_title.find("h1").string + main_body = soup.find("div", class_="docs-body") + for section in main_body.children: + if section.name != "section": + continue + section_id = section["id"] + section_title = section.find("h1") + + if section_title.string: + sec_title = section_title.string + else: + sec_title = list(section_title.strings)[0] + sec_descr = get_sanitized_description(str(section.p)) + if sec_title == title: + openapi_spec.info["description"] = sec_descr + else: + for tag in openapi_spec.tags: + if tag["name"] == section_id: + tag["description"] = sec_descr + # TODO(gtema): notes are aside of main "p" and not + # underneath + # Iterate over URLs + operation_url_containers = section.find_all( + "div", class_="operation-grp" + ) + for op in operation_url_containers: + ep = op.find("div", class_="endpoint-container") + ep_divs = ep.find_all("div") + url = doc_url_prefix + "".join(ep_divs[0].strings) + summary = "".join(ep_divs[1].strings) + method_span = op.find("div", class_="operation").find( + "span", class_="label" + ) + method = method_span.string + + # Find operation + path_spec = openapi_spec.paths.get(url) + if ( + url not in openapi_spec.paths + and url.startswith("/v") + and allow_strip_version + ): + # There is no direct URL match, but doc URL starts with /vXX - try searching without version prefix + m = re.search(r"^\/v[0-9.]*(\/.*)", url) + if m and m.groups(): + url = m.group(1) + path_spec = openapi_spec.paths.get(url) + + doc_source_param_mapping = {} + if not path_spec: + if "{" in url: + # The url contain parameters. 
It can be the case that + # parameter names are just different between source and + # docs + for existing_path in openapi_spec.paths.keys(): + existing_path_parts = existing_path.split("/") + doc_url_parts = url.split("/") + if len(existing_path_parts) != len(doc_url_parts): + # Paths have different length. Skip + continue + is_search_aborted = False + for source, doc in zip( + existing_path_parts, doc_url_parts + ): + source_ = source.strip("{}") + doc_ = doc.strip("{}") + if ( + source != doc + and source.startswith("{") + and doc.startswith("{") + and source_ != doc_ + ): + # Path parameter on both sides. Consider renamed parameter + doc_source_param_mapping[doc_] = source_ + elif source != doc: + # Path differs. No point in looking further + is_search_aborted = True + break + if is_search_aborted: + continue + # Assume we found something similar. Try to + # construct url with renames and compare it again. + # It should not be necessary, but it states: "safe is safe" + modified_url_parts = [] + for part in url.split("/"): + if part.startswith("{"): + doc_param_name = part.strip("{}") + modified_url_parts.append( + "{" + + doc_source_param_mapping.get( + doc_param_name, doc_param_name + ) + + "}" + ) + else: + modified_url_parts.append(part) + if "/".join(modified_url_parts) == existing_path: + # Is a definitive match + path_spec = openapi_spec.paths[existing_path] + break + + if not path_spec: + logging.info("Cannot find path %s in the spec" % url) + continue + + op_spec = getattr(path_spec, method.lower(), None) + if not op_spec: + logging.warn( + "Cannot find %s operation for %s in the spec" + % (method, url) + ) + continue + + if ( + op_spec.operationId in processed_operations + and not url.endswith("/action") + ): + # Do not update operation we have already processed + continue + else: + processed_operations.add(op_spec.operationId) + + # Find the button in the operaion container to get ID of the + # details section + details_button = op.find("button") + 
details_section_id = details_button["data-target"].strip("#") + details_section = section.find("section", id=details_section_id) + description = [] + action_name = None + # Gather description section paragraphs to construct operation description + for details_child in details_section.children: + if details_child.name == "p": + description.append(str(details_child)) + + elif details_child.name == "section": + if ( + details_child.h3 + and "Request" in details_child.h3.strings + ) or ( + details_child.h4 + and "Request" in details_child.h4.strings + ): + # Found request details + if not details_child.table: + logging.warn( + "No Parameters description table found for %s:%s in html", + url, + method, + ) + + continue + logging.debug( + "Processing Request parameters for %s:%s", + url, + method, + ) + + spec_body = ( + op_spec.requestBody.get("content", {}) + .get("application/json", {}) + .get("schema") + ) + if not spec_body: + logging.debug( + "No request body present in the spec for %s:%s", + url, + method, + ) + continue + (schema_specs, action_name) = _get_schema_candidates( + openapi_spec, + url, + spec_body, + action_name, + summary, + description, + ) + + _doc_process_operation_table( + details_child.table.tbody, + openapi_spec, + op_spec, + schema_specs, + doc_source_param_mapping, + ) + + if url.endswith("/action"): + for sch in schema_specs: + sch.summary = summary + # Neutron sometimes has h4 instead of h3 and "Response Parameters" instead of "Response" + elif ( + details_child.h3 + and ( + "Response" in details_child.h3.strings + or "Response Parameters" + in details_child.h3.strings + ) + ) or ( + details_child.h4 + and ( + "Response" in details_child.h4.strings + or "Response Parameters" + in details_child.h4.strings + ) + ): + # Found response details + if not details_child.table: + logging.warn( + "No Response Parameters description table found for %s:%s in html", + url, + method, + ) + + continue + logging.debug( + "Processing Response parameters for 
%s:%s", + url, + method, + ) + + spec_body = None + for rc in op_spec.responses: + # TODO(gtema): what if we have multiple positive RCs? + if rc.startswith("20"): + spec_body = ( + op_spec.responses[rc] + .get("content", {}) + .get("application/json", {}) + .get("schema") + ) + if not spec_body: + logging.info( + "Operation %s has no response body according to the spec", + op_spec.operationId, + ) + continue + (schema_specs, action_name) = _get_schema_candidates( + openapi_spec, url, spec_body, action_name + ) + try: + _doc_process_operation_table( + details_child.table.tbody, + openapi_spec, + op_spec, + schema_specs, + doc_source_param_mapping, + ) + except Exception: + # No luck processing it as parameters table + pass + + if not url.endswith("/action"): + pass + # This is not an "action" which combines various + # operations, so no summary/description info + op_spec.summary = summary + op_spec.description = get_sanitized_description( + "".join(description) + ) + + +def _doc_process_operation_table( + tbody, + openapi_spec, + op_spec, + schema_specs, + doc_source_param_mapping, +): + """Process DOC table (Request/Reseponse) and try to set description to + the matching schema property""" + + logging.debug("Processing %s", schema_specs) + for row in tbody.find_all("tr"): + tds = row.find_all("td") + doc_param_name = tds[0].p.string.replace(" (Optional)", "") + doc_param_location = tds[1].p.string + # doc_param_type = tds[2].p.string + doc_param_descr = get_sanitized_description( + "".join(str(x) for x in tds[3].contents).strip("\n ") + ) + if doc_param_location in ["query", "header", "path"]: + for src_param in op_spec.parameters: + if src_param.ref: + pname = src_param.ref.split("/")[-1] + param_def = openapi_spec.components.parameters.get( + doc_source_param_mapping.get(pname, pname) + ) + else: + param_def = src_param + if not param_def: + logging.warn("Cannot find parameter %s", src_param) + + if ( + param_def.location == doc_param_location + and 
param_def.name == doc_param_name + ): + param_def.description = LiteralScalarString( + doc_param_descr + ) + elif doc_param_location == "body": + # Body param. Traverse through body information + for schema in schema_specs: + prop = _find_schema_property(schema, doc_param_name) + if prop: + if hasattr(prop, "description"): + prop.description = doc_param_descr + else: + prop["description"] = doc_param_descr + pass + + +def _find_schema_property(schema, target_prop_name): + if not schema: + return + # logging.debug("Searching %s in %s", target_prop_name, schema) + xtype = schema["type"] if isinstance(schema, dict) else schema.type + if xtype == "object": + if isinstance(schema, TypeSchema): + props = schema.properties + elif isinstance(schema, dict): + props = schema.get("properties", {}) + if not props: + return + for prop_name, prop_def in props.items(): + prop_type = ( + prop_def.get("type") + if isinstance(prop_def, dict) + else prop_def.type + ) + if prop_name == target_prop_name: + return prop_def + elif ( + "." 
in target_prop_name + and target_prop_name.startswith(prop_name) + and prop_type == "object" + ): + # block_device_mapping_v2.tag like pattern + candidate = _find_schema_property( + prop_def, target_prop_name[len(prop_name) + 1 :] + ) + if candidate: + return candidate + elif prop_type == "object": + # name under the "server" + candidate = _find_schema_property(prop_def, target_prop_name) + if candidate: + return candidate + elif prop_type == "array": + # name under the "server" + candidate = _find_schema_property( + ( + prop_def.get("items") + if isinstance(prop_def, dict) + else prop_def.items + ), + target_prop_name, + ) + if candidate: + return candidate + + elif xtype == "array": + items_schema = ( + schema.items + if isinstance(schema, TypeSchema) + else schema.get("items") + ) + candidate = _find_schema_property(items_schema, target_prop_name) + if candidate: + return candidate + + +def get_schema(openapi_spec, ref): + """Resolve schema reference""" + if isinstance(ref, TypeSchema): + xref = ref.ref + elif isinstance(ref, str): + xref = ref + elif isinstance(ref, dict): + xref = ref.get("$ref") + if xref: + return openapi_spec.components.schemas.get(xref.split("/")[-1]) + else: + return ref + + +def _get_schema_candidates( + openapi_spec, + url, + spec_body, + action_name=None, + section_summary=None, + section_description=None, +): + schema_specs = [] + if isinstance(spec_body, TypeSchema): + ref = spec_body.ref + oneOf = spec_body.oneOf + else: + ref = spec_body.get("$ref") + oneOf = spec_body.get("oneOf") + if spec_body and ref: + candidate_schema = openapi_spec.components.schemas.get( + ref.split("/")[-1] + ) + if candidate_schema.oneOf: + for x in candidate_schema.oneOf: + ref = x.get("$ref") if isinstance(x, dict) else x.ref + xtype = x.get("type") if isinstance(x, dict) else x.type + # if isinstance(x, TypeSchema) and not x.get("$ref"): + # continue + if ref: + schema_specs.append( + openapi_spec.components.schemas.get(ref.split("/")[-1]) + ) + elif 
xtype: + # xtype is just to check that the + # schema is not a ref and not empty + schema_specs.append(x) + else: + schema_specs.append(candidate_schema) + + elif spec_body and oneOf: + for x in oneOf: + res = get_schema(openapi_spec, x) + + if url.endswith("/action"): + # For the actions we search for the + # matching entity + candidate_action_name = None + if isinstance(res, TypeSchema): + ext = res.openstack + else: + ext = res.get("x-openstack") + if ext: + candidate_action_name = ext.get("action-name") + if not candidate_action_name: + # Not able to figure out action name, abort + continue + + if candidate_action_name == action_name: + # We know which action we are searching for (most likely we process reponse + schema_specs.append(res) + + elif not action_name and section_description: + if candidate_action_name and ( + re.search( + rf"\b{candidate_action_name}\b", section_summary + ) + or ( + url.endswith("/volumes/{volume_id}/action") + # Cinder doc does not contain action name in the + # summary, but looking only to description causes + # faulty matches in Nova + and re.search( + rf"\b{candidate_action_name}\b", + section_description, + ) + ) + ): + # This is an action we are hopefully interested in + # Now we can have single schema or multiple (i.e. microversions) + if isinstance(res, TypeSchema): + itms = res.oneOf + elif isinstance(res, dict): + itms = res.get("oneOf") + if itms: + for itm in itms: + schema_specs.append( + get_schema(openapi_spec, itm) + ) + schema_specs.append(res) + # Set the action name. Since + # Request normally comes before + # the response we can reuse it + # later. 
+ action_name = candidate_action_name + res.description = get_sanitized_description( + "".join(section_description) + ) + + else: + schema_specs.append(res) + + return (schema_specs, action_name) + + +def get_sanitized_description(descr: str) -> LiteralScalarString: + return LiteralScalarString(md(descr, escape_underscores=False).rstrip()) diff --git a/codegenerator/openapi_spec.py b/codegenerator/openapi_spec.py new file mode 100644 index 0000000..a12c65e --- /dev/null +++ b/codegenerator/openapi_spec.py @@ -0,0 +1,90 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +import logging + +from codegenerator.base import BaseGenerator + + +class OpenApiSchemaGenerator(BaseGenerator): + def __init__(self): + super().__init__() + + def get_parser(self, parser): + parser.add_argument( + "--api-ref-src", + help="Path to the rendered api-ref html to extract descriptions", + ) + return parser + + def generate_nova(self, target_dir, args): + from codegenerator.openapi.nova import NovaGenerator + + NovaGenerator().generate(target_dir, args) + + def generate_cinder(self, target_dir, args): + from codegenerator.openapi.cinder import CinderV3Generator + + CinderV3Generator().generate(target_dir, args) + + def generate_glance(self, target_dir, args): + from codegenerator.openapi.glance import GlanceGenerator + + GlanceGenerator().generate(target_dir, args) + + def generate_keystone(self, target_dir, args): + from codegenerator.openapi.keystone import KeystoneGenerator + + KeystoneGenerator().generate(target_dir, args) + + def generate_octavia(self, target_dir, args): + from codegenerator.openapi.octavia import OctaviaGenerator + + OctaviaGenerator().generate(target_dir, args) + + def generate_neutron(self, target_dir, args): + from codegenerator.openapi.neutron import NeutronGenerator + + NeutronGenerator().generate(target_dir, args) + + def generate_placement(self, target_dir, args): + from codegenerator.openapi.placement import PlacementGenerator + + PlacementGenerator().generate(target_dir, args) + + def generate( + self, res, target_dir, openapi_spec=None, operation_id=None, args=None + ): + """Generate Schema definition file for Resource""" + logging.debug("Generating OpenAPI schema data in %s" % target_dir) + # We do not import generators since due to the use of Singletons in the + # code importing glance, nova, cinder at the same time crashes + # dramatically + if args.service_type == "compute": + self.generate_nova(target_dir, args) + elif args.service_type in ["block-storage", "volume"]: + self.generate_cinder(target_dir, args) + 
elif args.service_type == "image": + self.generate_glance(target_dir, args) + elif args.service_type == "identity": + self.generate_keystone(target_dir, args) + elif args.service_type == "load-balancing": + self.generate_octavia(target_dir, args) + elif args.service_type == "network": + self.generate_neutron(target_dir, args) + elif args.service_type == "placement": + self.generate_placement(target_dir, args) + else: + raise RuntimeError( + "Service type %s is not supported", args.service_type + ) diff --git a/codegenerator/osc.py b/codegenerator/osc.py new file mode 100644 index 0000000..19e21f3 --- /dev/null +++ b/codegenerator/osc.py @@ -0,0 +1,220 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +import logging +from pathlib import Path + +from codegenerator.base import BaseGenerator + + +class OSCGenerator(BaseGenerator): + def __init__(self): + super().__init__() + + def _render_command( + self, + context: dict, + osc_path: list, + impl_template: str, + impl_dest: Path, + test_template: str, + test_dest: Path, + ): + """Render command code""" + self._render(impl_template, context, impl_dest.parent, impl_dest.name) + + unittest_path = test_dest.parent + + unittest_path.mkdir(parents=True, exist_ok=True) + Path(unittest_path, "__init__.py").touch() + + self._render(test_template, context, test_dest.parent, test_dest.name) + + def generate(self, res, target_dir, args=None): + """Generate code for the OpenStackClient""" + logging.debug("Generating OpenStackClient code in %s" % target_dir) + osc_path = res.mod_name.split(".")[1:] + + context = dict( + res=res.resource_class, + sdk_mod_name=res.mod_name, + osc_mod_name=res.mod_name.replace( + "openstack.", "openstackclient." + ), + class_name=res.class_name, + resource_name=res.class_name.lower(), + sdk_service_name=res.service_name, + proxy=res.proxy_obj, + fqcn=res.fqcn, + registry_name=res.registry_name, + attrs=res.attrs, + ) + + work_dir = Path(target_dir) + work_dir.mkdir(parents=True, exist_ok=True) + + # Generate common (i.e. 
formatters) + impl_path = Path(work_dir, "openstackclient", "/".join(osc_path)) + impl_path.mkdir(parents=True, exist_ok=True) + Path(impl_path, "__init__.py").touch() + self._render( + "osc/impl_common.py.j2", + context, + Path(work_dir, "openstackclient", "/".join(osc_path)), + "common.py", + ) + + if res.resource_class.allow_list: + # Generate methods for the list resources command + self._render_command( + context, + osc_path, + "osc/impl_list.py.j2", + Path( + work_dir, "openstackclient", "/".join(osc_path), "list.py" + ), + "osc/test_unit_list.py.j2", + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + "test_list.py", + ), + ) + + if res.resource_class.allow_fetch: + # Generate methods for the GET resource command + self._render_command( + context, + osc_path, + "osc/impl_show.py.j2", + Path( + work_dir, "openstackclient", "/".join(osc_path), "show.py" + ), + "osc/test_unit_show.py.j2", + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + "test_show.py", + ), + ) + + if res.resource_class.allow_create: + # Generate methods for the CREATE resource command + self._render_command( + context, + osc_path, + "osc/impl_create.py.j2", + Path( + work_dir, + "openstackclient", + "/".join(osc_path), + "create.py", + ), + "osc/test_unit_create.py.j2", + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + "test_create.py", + ), + ) + + if res.resource_class.allow_delete: + # Generate methods for the DELETE resource command + self._render_command( + context, + osc_path, + "osc/impl_delete.py.j2", + Path( + work_dir, + "openstackclient", + "/".join(osc_path), + "delete.py", + ), + "osc/test_unit_delete.py.j2", + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + "test_delete.py", + ), + ) + + if res.resource_class.allow_commit: + # Generate methods for the UPDATE resource command + self._render_command( + context, + osc_path, + 
"osc/impl_set.py.j2", + Path( + work_dir, + "openstackclient", + "/".join(osc_path), + "set.py", + ), + "osc/test_unit_set.py.j2", + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + "test_set.py", + ), + ) + + # Unset command + self._render_command( + context, + osc_path, + "osc/impl_unset.py.j2", + Path( + work_dir, + "openstackclient", + "/".join(osc_path), + "unset.py", + ), + "osc/test_unit_unset.py.j2", + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + "test_unset.py", + ), + ) + + # Format rendered code to have less flake complains. This will still + # not guarantee code is fitting perfect, since there might be too long + # lines + self._format_code( + Path(work_dir, "openstackclient", "/".join(osc_path)), + Path( + work_dir, + "openstackclient", + "tests", + "unit", + "/".join(osc_path), + ), + ) diff --git a/codegenerator/rust_cli.py b/codegenerator/rust_cli.py new file mode 100644 index 0000000..7137c32 --- /dev/null +++ b/codegenerator/rust_cli.py @@ -0,0 +1,1374 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import logging +from pathlib import Path +import subprocess +import re +from typing import Type + +from codegenerator.base import BaseGenerator +from codegenerator import common +from codegenerator import model +from codegenerator.common import rust as common_rust +from codegenerator.common import BasePrimitiveType +from codegenerator.common import BaseCombinedType +from codegenerator.common import BaseCompoundType + + +BASIC_FIELDS = [ + "id", + "name", + "created_at", + "updated_at", + "uuid", + "state", + "status", +] + + +class BooleanFlag(common_rust.Boolean): + """Boolean parameter that is represented as a CLI flag""" + + type_hint: str = "bool" + clap_macros: set[str] = set(["action=clap::ArgAction::SetTrue"]) + original_data_type: BaseCompoundType | BasePrimitiveType | None = None + + +class String(common_rust.String): + """CLI String type""" + + clap_macros: set[str] = set() + original_data_type: BaseCompoundType | BaseCombinedType | None = None + # Temporary add string enum for parameters which we do not want to handle + # as StringEnums + enum: set[str] | None = None + # imports: set[str] = set(["dialoguer::Password"]) + + @property + def imports(self) -> set[str]: + if self.format and self.format == "password": + return set(["dialoguer::Password"]) + return set([]) + + +class IntString(common.BasePrimitiveType): + """CLI Integer or String""" + + imports: set[str] = set(["crate::common::IntString"]) + type_hint: str = "IntString" + clap_macros: set[str] = set() + + +class NumString(common.BasePrimitiveType): + """CLI Number or String""" + + imports: set[str] = set(["crate::common::NumString"]) + type_hint: str = "NumString" + clap_macros: set[str] = set() + + +class BoolString(common.BasePrimitiveType): + """CLI Boolean or String""" + + imports: set[str] = set(["crate::common::BoolString"]) + type_hint: str = "BoolString" + clap_macros: set[str] = set() + + +class VecString(common.BasePrimitiveType): + """CLI Vector of strings""" + + imports: 
set[str] = set(["crate::common::VecString"]) + type_hint: str = "VecString" + clap_macros: set[str] = set() + + +class JsonValue(common_rust.JsonValue): + """Arbitrary JSON value""" + + imports: set[str] = set(["crate::common::parse_json", "serde_json::Value"]) + clap_macros: set[str] = set( + ['value_name="JSON"', "value_parser=parse_json"] + ) + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + +class StructInputField(common_rust.StructField): + """Structure field of the CLI input""" + + additional_clap_macros: set[str] = set() + + @property + def type_hint(self): + typ_hint = self.data_type.type_hint + if self.is_optional: + typ_hint = f"Option<{typ_hint}>" + # Password input must be optional + if ( + getattr(self.data_type, "format", None) == "password" + and not self.is_optional + ): + typ_hint = f"Option<{typ_hint}>" + return typ_hint + + @property + def builder_macros(self): + macros: set[str] = set([]) + if not isinstance(self.data_type, BaseCompoundType): + macros.update(self.data_type.builder_macros) + else: + macros.add("setter(into)") + if self.is_optional: + macros.add("default") + return f"#[builder({', '.join(sorted(macros))})]" + + @property + def serde_macros(self): + macros = set([]) + if self.local_name != self.remote_name: + macros.add(f'rename="{self.remote_name}"') + return f"#[serde({', '.join(sorted(macros))})]" + + @property + def clap_macros(self): + if isinstance(self.data_type, common_rust.Struct): + # For substrucs (and maybe enums) we tell Clap to flatten subtype + # instead of exposing attr itself + return "#[command(flatten)]" + if isinstance(self.data_type, common_rust.Option) and isinstance( + self.data_type.item_type, common_rust.Struct + ): + return "#[command(flatten)]" + macros = set(["long"]) + try: + if self.data_type.clap_macros: + macros.update(self.data_type.clap_macros) + # i.e. 
CLI groups are managed through the code dynamically + macros.update(self.additional_clap_macros) + except Exception as ex: + logging.exception("Error getting clap_macros for %s: %s", self, ex) + return f"#[arg({', '.join(sorted(macros))})]" + + def clap_macros_ext(self, is_group: bool | None = None): + if isinstance(self.data_type, common_rust.Struct): + # For substrucs (and maybe enums) we tell Clap to flatten subtype + # instead of exposing attr itself + return "#[command(flatten)]" + macros = set(["long"]) + if is_group and not self.is_optional: + macros.add("required=false") + try: + if self.data_type.clap_macros: + macros.update(self.data_type.clap_macros) + # i.e. CLI groups are managed through the code dynamically + macros.update(self.additional_clap_macros) + except Exception as ex: + logging.exception("Error getting clap_macros for %s: %s", self, ex) + return f"#[arg({', '.join(sorted(macros))})]" + + +class StructInput(common_rust.Struct): + field_type_class_: Type[common_rust.StructField] = StructInputField + clap_macros: set[str] = set() + original_data_type: BaseCompoundType | BaseCompoundType | None = None + is_group: bool = False + is_required: bool = False + + @property + def imports(self): + imports: set[str] = set(["serde::Deserialize"]) + for field in self.fields.values(): + imports.update(field.data_type.imports) + if self.additional_fields_type: + imports.add("crate::common::parse_key_val") + imports.update(self.additional_fields_type.imports) + return imports + + +class EnumGroupStructInputField(StructInputField): + """Container for complex Enum field""" + + sdk_parent_enum_variant: str | None = None + + +class EnumGroupStruct(common_rust.Struct): + """Container for complex Enum containing Array""" + + field_type_class_: Type[common_rust.StructField] = ( + EnumGroupStructInputField + ) + base_type: str = "struct" + sdk_enum_name: str + is_group: bool = True + is_required: bool = False + + +class StructFieldResponse(common_rust.StructField): + 
"""Response Structure Field""" + + @property + def type_hint(self): + typ_hint = self.data_type.type_hint + if self.is_optional and not typ_hint.startswith("Option<"): + typ_hint = f"Option<{typ_hint}>" + return typ_hint + + @property + def serde_macros(self): + macros = set([]) + if self.local_name != self.remote_name: + macros.add(f'rename="{self.remote_name}"') + return f"#[serde({', '.join(sorted(macros))})]" + + def get_structable_macros( + self, + struct: "StructResponse", + service_name: str, + resource_name: str, + operation_type: str, + ): + macros = set([]) + if self.is_optional: + macros.add("optional") + if self.local_name != self.remote_name: + macros.add(f'title="{self.remote_name}"') + # Fully Qualified Attribute Name + fqan: str = ".".join( + [service_name, resource_name, self.remote_name] + ).lower() + # Check the known alias of the field by FQAN + alias = common.FQAN_ALIAS_MAP.get(fqan) + if operation_type == "list": + if ( + "id" in struct.fields.keys() + and not ( + self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS + ) + ) or ( + "id" not in struct.fields.keys() + and (self.local_name not in list(struct.fields.keys())[-10:]) + ): + # Only add "wide" flag if field is not in the basic fields AND + # there is at least "id" field existing in the struct OR the + # field is not in the first 10 + macros.add("wide") + if self.data_type.type_hint in [ + "Value", + "Option<Value>", + "Vec<Value>", + "Option<Vec<Value>>", + ]: + macros.add("pretty") + return f"#[structable({', '.join(sorted(macros))})]" + + +class StructResponse(common_rust.Struct): + field_type_class_: Type[common_rust.StructField] = StructFieldResponse + + @property + def imports(self): + imports: set[str] = set(["serde::Deserialize"]) + for field in self.fields.values(): + imports.update(field.data_type.imports) + # In difference to the SDK and Input we do not currently handle + # additional_fields of the struct in response + # if self.additional_fields_type: + # 
imports.add("std::collections::BTreeMap") + # imports.update(self.additional_fields_type.imports) + return imports + + +class TupleStruct(common_rust.Struct): + """Rust tuple struct without named fields""" + + base_type: str = "struct" + tuple_fields: list[common_rust.StructField] = [] + + @property + def imports(self): + imports: set[str] = set([]) + for field in self.tuple_fields: + imports.update(field.data_type.imports) + return imports + + +class DictionaryInput(common_rust.Dictionary): + lifetimes: set[str] = set() + original_data_type: BaseCompoundType | BaseCompoundType | None = None + + @property + def type_hint(self): + return f"Vec<(String, {self.value_type.type_hint})>" + + @property + def imports(self): + imports = set([]) + if not isinstance(self.value_type, common_rust.Option): + imports.add("crate::common::parse_key_val") + else: + imports.add("crate::common::parse_key_val_opt") + imports.update(self.value_type.imports) + return imports + + @property + def clap_macros(self): + macros = set( + [ + "long", + 'value_name="key=value"', + ] + ) + + if not isinstance(self.value_type, common_rust.Option): + macros.add( + f"value_parser=parse_key_val::<String, {self.value_type.type_hint}>", + ) + else: + macros.add( + f"value_parser=parse_key_val_opt::<String, {self.value_type.item_type.type_hint}>", + ) + return macros + + +class StringEnum(common_rust.StringEnum): + imports: set[str] = set(["clap::ValueEnum"]) + + +class ArrayInput(common_rust.Array): + original_data_type: ( + common_rust.BaseCompoundType + | common_rust.BaseCombinedType + | common_rust.BasePrimitiveType + | None + ) = None + + @property + def clap_macros(self): + macros: set[str] = set(["long", "action=clap::ArgAction::Append"]) + macros.update(self.item_type.clap_macros) + return macros + + +class ArrayResponse(common_rust.Array): + """Vector of data for the Reponse + + in the reponse need to be converted to own type to implement Display""" + + @property + def type_hint(self): + return 
f"Vec{self.item_type.type_hint}" + + +class HashMapResponse(common_rust.Dictionary): + lifetimes: set[str] = set() + + @property + def type_hint(self): + return f"HashMapString{self.value_type.type_hint.replace('<', '').replace('>', '')}" + + @property + def imports(self): + imports = self.value_type.imports + imports.add("std::collections::HashMap") + return imports + + +class CommaSeparatedList(common_rust.CommaSeparatedList): + @property + def type_hint(self): + return f"Vec<{self.item_type.type_hint}>" + + +class RequestParameter(common_rust.RequestParameter): + """OpenAPI request parameter in the Rust form""" + + @property + def clap_macros(self): + macros: set[str] = set() + if not self.is_required: + macros.add("long") + if self.location == "path": + # Sometime there is a collision of path params and body params. + # In order to prevent this force clap arg ID to be prefixed, while + # the value_name is turned back to the expected value + macros.add(f'id = "path_param_{self.local_name}"') + macros.add(f'value_name = "{self.local_name.upper()}"') + elif self.location == "query": + macros.update(self.data_type.clap_macros) + if hasattr(self.data_type, "enum") and self.data_type.enum: + values = ",".join(f'"{x}"' for x in sorted(self.data_type.enum)) + macros.add(f"value_parser = [{values}]") + return f"#[arg({', '.join(sorted(macros))})]" + + +class RequestTypeManager(common_rust.TypeManager): + primitive_type_mapping: dict[ + Type[model.PrimitiveType], Type[BasePrimitiveType] + ] = { + model.PrimitiveString: String, + model.ConstraintString: String, + model.PrimitiveAny: JsonValue, + } + + data_type_mapping: dict[ + Type[model.ADT], Type[BaseCombinedType] | Type[BaseCompoundType] + ] + + data_type_mapping = { + model.Struct: StructInput, + model.Dictionary: DictionaryInput, + model.Array: ArrayInput, + model.CommaSeparatedList: ArrayInput, + model.Set: ArrayInput, + } + + request_parameter_class: Type[common_rust.RequestParameter] = ( + RequestParameter + ) + 
string_enum_class = StringEnum + + def get_local_attribute_name(self, name: str) -> str: + """Get localized attribute name""" + name = name.replace(".", "_") + attr_name = "_".join( + x.lower() for x in re.split(common.SPLIT_NAME_RE, name) + ) + if attr_name in ["type", "self", "enum", "ref"]: + attr_name = f"_{attr_name}" + return attr_name + + def get_remote_attribute_name(self, name: str) -> str: + """Get the attribute name on the SDK side""" + return self.get_local_attribute_name(name) + + def get_var_name_for(self, obj) -> str: + attr_name = "_".join( + x.lower() for x in re.split(common.SPLIT_NAME_RE, obj.name) + ) + if attr_name in ["type", "self", "enum", "ref"]: + attr_name = f"_{attr_name}" + return attr_name + + def _get_one_of_type( + self, type_model: model.OneOfType + ) -> BaseCompoundType | BaseCombinedType | BasePrimitiveType: + """Convert `model.OneOfType` into Rust model""" + result = super()._get_one_of_type(type_model) + + # Field is of Enum type. + if isinstance(result, common_rust.Enum): + variant_classes = [ + x.data_type.__class__ for x in result.kinds.values() + ] + + if ( + StringEnum in variant_classes + and ArrayInput in variant_classes + and len(variant_classes) == 2 + ): + # There is a StringEnum and Array in the Enum. Clap cannot + # handle it so we convert StringEnum variants into flags + # and keep only rest + # This usecase is here at least to handle server.networks + # which are during creation `none`|`auto`|`JSON` + # On the SDK side where this method is not overriden there + # would be a naming conflict resulting in `set_models` call + # adding type name as a suffix. 
+ sdk_enum_name = result.name + result.__class__.__name__ + obj = EnumGroupStruct( + name=self.get_model_name(type_model.reference), + kinds={}, + sdk_enum_name=sdk_enum_name, + ) + field_class = obj.field_type_class_ + if not type_model.reference: + raise NotImplementedError + name = type_model.reference.name + for k, v in result.kinds.items(): + if isinstance(v.data_type, common_rust.StringEnum): + for x in v.data_type.variants: + field = field_class( + local_name=f"{x.lower()}_{name}", + remote_name=f"{v.data_type.name}::{x}", + sdk_parent_enum_variant=f"{sdk_enum_name}::{k}", + data_type=BooleanFlag(), + is_optional=False, + is_nullable=False, + ) + obj.fields[field.local_name] = field + else: + field = field_class( + local_name=f"{name}", + remote_name=f"{sdk_enum_name}::{k}", + data_type=v.data_type, + is_optional=True, + is_nullable=False, + ) + obj.fields[field.local_name] = field + result = obj + + return result + + def convert_model( + self, + type_model: model.PrimitiveType | model.ADT | model.Reference, + ) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType: + """Get local destination type from the ModelType""" + model_ref: model.Reference | None = None + typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = ( + None + ) + + if isinstance(type_model, model.Reference): + model_ref = type_model + type_model = self._get_adt_by_reference(model_ref) + elif isinstance(type_model, model.ADT): + # Direct composite type + model_ref = type_model.reference + + # CLI hacks + if isinstance(type_model, model.Struct) and not type_model.reference: + # Check the root structure + if len(type_model.fields) == 1: + # Struct with only 1 key + only_field = list(type_model.fields.keys())[0] + if isinstance( + type_model.fields[only_field].data_type, + model.PrimitiveNull, + ): + # The only field is null. No input is necessary + logging.debug( + "API accepts only 1 field of type Null. No input is required." 
+ ) + type_model.fields = {} + if isinstance(type_model, model.Array): + if isinstance(type_model.item_type, model.Reference): + item_type = self._get_adt_by_reference(type_model.item_type) + else: + item_type = type_model.item_type + + if ( + isinstance(item_type, model.Struct) + and len(item_type.fields.keys()) > 1 + ): + # An array of structs with more then 1 field + # Array of Structs can not be handled by the CLI (input). + # Therefore handle underlaying structure as Json saving + # reference to the original "expected" stuff to make final + # input conversion possible + original_data_type = self.convert_model(item_type) + # We are not interested to see unused data in the submodels + self.ignored_models.append(item_type) + # self.ignored_models.extend( + # x.data_type for x in item_type.fields.values() + # ) + typ = self.data_type_mapping[model.Array]( + description=common_rust.sanitize_rust_docstrings( + type_model.description + ), + original_data_type=original_data_type, + item_type=JsonValue(), + ) + elif isinstance(item_type, model.Array) and isinstance( + item_type.item_type, model.ConstraintString + ): + original_data_type = self.convert_model(item_type) + typ = self.data_type_mapping[model.Array]( + description=common_rust.sanitize_rust_docstrings( + type_model.description + ), + original_data_type=original_data_type, + item_type=String(), + ) + + if typ: + if model_ref: + self.refs[model_ref] = typ + else: + # Not hacked anything, invoke superior method + typ = super().convert_model(type_model) + return typ + + def _get_struct_type(self, type_model: model.Struct) -> common_rust.Struct: + """Convert model.Struct into rust_cli `Struct`""" + struct_class = self.data_type_mapping[model.Struct] + mod = struct_class( + name=self.get_model_name(type_model.reference), + description=common_rust.sanitize_rust_docstrings( + type_model.description + ), + ) + field_class = mod.field_type_class_ + for field_name, field in type_model.fields.items(): + is_nullable: 
bool = False + field_data_type = self.convert_model(field.data_type) + if isinstance(field_data_type, self.option_type_class): + # Unwrap Option into "is_nullable" + # NOTE: but perhaps + # Option<Option> is better (not set vs set explicitly to None + # ) + is_nullable = True + if isinstance( + field_data_type.item_type, + (common_rust.Array, DictionaryInput, String), + ): + # Unwrap Option<Option<...>> + field_data_type = field_data_type.item_type + elif isinstance(field_data_type, EnumGroupStruct): + field_data_type.is_required = field.is_required + f = field_class( + local_name=self.get_local_attribute_name(field_name), + remote_name=self.get_remote_attribute_name(field_name), + description=common_rust.sanitize_rust_docstrings( + field.description + ), + data_type=field_data_type, + is_optional=not field.is_required, + is_nullable=is_nullable, + ) + if mod.name != "Request" and isinstance( + field_data_type, struct_class + ): + field_data_type.is_group = True + field_data_type.is_required = field.is_required + if isinstance(field_data_type, self.option_type_class): + f.is_nullable = True + mod.fields[field_name] = f + # Repeat additional_fields handling as in + # common/rust.py + if type_model.additional_fields: + definition = type_model.additional_fields + # Structure allows additional fields + if isinstance(definition, bool): + mod.additional_fields_type = self.primitive_type_mapping[ + model.PrimitiveAny + ] + else: + mod.additional_fields_type = self.convert_model(definition) + + return mod + + def _get_array_type(self, type_model: model.Array) -> common_rust.Array: + """Convert `model.Array` into corresponding Rust model""" + item_type = self.convert_model(type_model.item_type) + struct_class = self.data_type_mapping[model.Struct] + # item_ref: model.Reference | None = None + # if isinstance(type_model.item_type, model.Reference): + # item_ref = type_model.item_type + # elif hasattr(type_model.item_type, "reference"): + # item_ref = 
type_model.item_type.reference + if isinstance(item_type, struct_class): + if len(item_type.fields.keys()) == 1: + # Server.security_groups is an object with only name -> simplify + # Only simplify structure with single simple property and name != + # "Request" (root request) + only_field_name = list(item_type.fields.keys())[0] + only_field = item_type.fields[only_field_name] + if not isinstance(only_field.data_type, StructInput): + # If there is only single field in the struct and it is not a + # new struct simplify it. + simplified_data_type = only_field.data_type.model_copy() + simplified_data_type.original_data_type = item_type + logging.debug( + "Replacing single field object %s with %s", + type_model.item_type, + simplified_data_type, + ) + self.ignored_models.append(type_model.item_type) + item_type = simplified_data_type + elif isinstance(item_type, DictionaryInput): + # Array of Freestyle objects in CLI can be only represented as + # array of JsonValue + simplified_data_type = JsonValue() + simplified_data_type.original_data_type = item_type + # self.ignored_models.append(item_ref) + item_type = simplified_data_type + + return self.data_type_mapping[model.Array]( + name=self.get_model_name(type_model.reference), item_type=item_type + ) + + def set_parameters(self, parameters: list[model.RequestParameter]) -> None: + """Set OpenAPI operation parameters into typemanager for conversion""" + super().set_parameters(parameters) + for k, param in self.parameters.items(): + if param.is_flag: + param.data_type = BooleanFlag( + original_data_type=param.data_type, **param.model_dump() + ) + self.parameters[k] = param + + +class ResponseTypeManager(common_rust.TypeManager): + primitive_type_mapping: dict[ + Type[model.PrimitiveType], Type[BasePrimitiveType] + ] = { + model.PrimitiveString: common_rust.String, + model.ConstraintString: common_rust.String, + } + + data_type_mapping = { + model.Struct: StructResponse, + model.Array: JsonValue, + model.Dictionary: 
JsonValue, + } + + def get_model_name(self, model_ref: model.Reference | None) -> str: + """Get the localized model type name + + In order to avoid collision between structures in request and + response we prefix all types with `Response` + :returns str: Type name + """ + if not model_ref: + return "Response" + return "Response" + "".join( + x.capitalize() + for x in re.split(common.SPLIT_NAME_RE, model_ref.name) + ) + + def convert_model( + self, + type_model: model.PrimitiveType | model.ADT | model.Reference, + ) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType: + """Get local destination type from the ModelType""" + model_ref: model.Reference | None = None + typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = ( + None + ) + if isinstance(type_model, model.Reference): + model_ref = type_model + type_model = self._get_adt_by_reference(model_ref) + elif isinstance(type_model, model.ADT): + # Direct composite type + model_ref = type_model.reference + + # CLI response PRE hacks + if isinstance(type_model, model.Array): + item_type = type_model.item_type + if isinstance(item_type, String): + # Array of string is replaced by `VecString` type + typ = VecString() + elif ( + model_ref + and model_ref.name == "links" + and model_ref.type == model.Array + ): + # Array of "links" is replaced by Json Value + typ = common_rust.JsonValue() + self.ignored_models.append(type_model.item_type) + elif ( + isinstance(item_type, model.Reference) + and type_model.item_type.type == model.Struct + ): + # Array of complex Structs is replaced on output by Json Value + typ = common_rust.JsonValue() + self.ignored_models.append(item_type) + if typ: + if model_ref: + self.refs[model_ref] = typ + else: + # Not hacked anything, invoke superior method + typ = super().convert_model(type_model) + + # POST hacks + if typ and isinstance(typ, common_rust.StringEnum): + # There is no sense of Enum in the output. 
Convert to the plain + # string + typ = String( + description=common_rust.sanitize_rust_docstrings( + typ.description + ) + ) + if ( + typ + and isinstance(typ, ArrayResponse) + and isinstance(typ.item_type, common_rust.Enum) + ): + # Array of complex Enums is replaced on output by Json Value + self.ignored_models.append(typ.item_type) + typ = common_rust.JsonValue() + return typ + + def _simplify_oneof_combinations(self, type_model, kinds): + """Simplify certain known oneOf combinations""" + kinds_classes = [x["class"] for x in kinds] + if ( + common_rust.String in kinds_classes + and common_rust.Number in kinds_classes + ): + # oneOf [string, number] => NumString + kinds.clear() + kinds.append({"local": NumString(), "class": NumString}) + elif ( + common_rust.String in kinds_classes + and common_rust.Integer in kinds_classes + ): + # oneOf [string, integer] => NumString + kinds.clear() + kinds.append({"local": IntString(), "class": IntString}) + elif ( + common_rust.String in kinds_classes + and common_rust.Boolean in kinds_classes + ): + # oneOf [string, boolean] => String + kinds.clear() + kinds.append({"local": BoolString(), "class": BoolString}) + super()._simplify_oneof_combinations(type_model, kinds) + + def _get_struct_type(self, type_model: model.Struct) -> common_rust.Struct: + """Convert model.Struct into Rust `Struct`""" + struct_class = self.data_type_mapping[model.Struct] + mod = struct_class( + name=self.get_model_name(type_model.reference), + description=common_rust.sanitize_rust_docstrings( + type_model.description + ), + ) + field_class = mod.field_type_class_ + for field_name, field in type_model.fields.items(): + is_nullable: bool = False + field_data_type = self.convert_model(field.data_type) + if isinstance(field_data_type, self.option_type_class): + # Unwrap Option into "is_nullable" NOTE: but perhaps + # Option<Option> is better (not set vs set explicitly to None + # ) + is_nullable = True + if isinstance(field_data_type.item_type, 
common_rust.Array): + # Unwrap Option<Option<Vec...>> + field_data_type = field_data_type.item_type + elif isinstance(field_data_type, struct_class): + field_data_type = JsonValue(**field_data_type.model_dump()) + f = field_class( + local_name=self.get_local_attribute_name(field_name), + remote_name=self.get_remote_attribute_name(field_name), + description=common_rust.sanitize_rust_docstrings( + field.description + ), + data_type=field_data_type, + is_optional=not field.is_required, + is_nullable=is_nullable, + ) + mod.fields[field_name] = f + if type_model.additional_fields: + definition = type_model.additional_fields + # Structure allows additional fields + if isinstance(definition, bool): + mod.additional_fields_type = self.primitive_type_mapping[ + model.PrimitiveAny + ] + else: + mod.additional_fields_type = self.convert_model(definition) + return mod + + def get_subtypes(self): + """Get all subtypes excluding TLA""" + emited_data: set[str] = set() + for k, v in self.refs.items(): + if ( + k + and isinstance( + v, + ( + common_rust.Enum, + common_rust.Struct, + common_rust.StringEnum, + common_rust.Dictionary, + common_rust.Array, + ), + ) + and k.name != "Body" + ): + key = v.base_type + v.type_hint + if key not in emited_data: + emited_data.add(key) + yield v + + def get_imports(self): + """Get complete set of additional imports required by all models in scope""" + imports: set[str] = super().get_imports() + imports.discard("crate::common::parse_json") + return imports + + +class RustCliGenerator(BaseGenerator): + def __init__(self): + super().__init__() + + def _format_code(self, *args): + """Format code using Rustfmt + + :param *args: Path to the code to format + """ + for path in args: + subprocess.run(["rustfmt", "--edition", "2021", path]) + + def get_parser(self, parser): + parser.add_argument( + "--operation-type", + choices=[ + "list", + "show", + "create", + "set", + "action", + "delete", + "download", + "upload", + "json", + ], + help="Rust CLI 
Command type (only for rust-cli target)", + ) + parser.add_argument( + "--command-name", + help="Rust CLI Command name (used as final module name)", + ) + parser.add_argument( + "--cli-mod-path", + help="Mod path (dot separated) of the corresponding SDK command (when non standard)", + ) + + parser.add_argument( + "--sdk-mod-path", + help="Mod path (dot separated) of the corresponding SDK command (when non standard)", + ) + + return parser + + def _render_command( + self, + context: dict, + impl_template: str, + impl_dest: Path, + ): + """Render command code""" + self._render(impl_template, context, impl_dest.parent, impl_dest.name) + + def generate( + self, res, target_dir, openapi_spec=None, operation_id=None, args=None + ): + """Generate code for the Rust openstack_cli""" + logging.debug( + "Generating Rust CLI code for `%s` in %s" + % (operation_id, target_dir) + ) + work_dir = Path(target_dir, "rust", "openstack_cli", "src") + + if not openapi_spec: + openapi_spec = common.get_openapi_spec(args.openapi_yaml_spec) + if not operation_id: + operation_id = args.openapi_operation_id + + (path, method, spec) = common.find_openapi_operation( + openapi_spec, operation_id + ) + _, res_name = res.split(".") if res else (None, None) + resource_name = common.get_resource_names_from_url(path)[-1] + + openapi_parser = model.OpenAPISchemaParser() + operation_params: list[model.RequestParameter] = [] + sdk_mod_path_base = common.get_rust_sdk_mod_path( + args.service_type, + args.api_version, + args.module_path or path, + ) + cli_mod_path = common.get_rust_cli_mod_path( + args.service_type, + args.api_version, + args.module_path or path, + ) + target_class_name = resource_name + is_image_download: bool = False + is_json_patch: bool = False + + # Collect all operation parameters + for param in openapi_spec["paths"][path].get( + "parameters", [] + ) + spec.get("parameters", []): + if ( + ("{" + param["name"] + "}") in path and param["in"] == "path" + ) or param["in"] != "path": + 
# Respect path params that appear in path and not path params + param_ = openapi_parser.parse_parameter(param) + if param_.name == f"{resource_name}_id": + # for i.e. routers/{router_id} we want local_name to be `id` and not `router_id` + param_.name = "id" + operation_params.append(param_) + + # List of operation variants (based on the body) + operation_variants = common_rust.get_operation_variants( + spec, args.operation_name + ) + + body_types: list[str] = [] + last_path_parameter: RequestParameter | None = None + if ( + args.operation_type == "download" + and path == "/v2/images/{image_id}/file" + ): + is_image_download = True + + if args.operation_type == "upload": + # collect registered media types for upload operation + request_body = spec.get("requestBody") + content = request_body.get("content", {}) + body_types = list(content.keys()) + + for operation_variant in operation_variants: + logging.debug("Processing variant %s" % operation_variant) + additional_imports = set() + type_manager: common_rust.TypeManager = RequestTypeManager() + response_type_manager: common_rust.TypeManager = ( + ResponseTypeManager() + ) + result_is_list: bool = False + is_list_paginated: bool = False + if operation_params: + type_manager.set_parameters(operation_params) + + mod_name = "_".join( + x.lower() + for x in re.split( + common.SPLIT_NAME_RE, + ( + args.module_name + or args.operation_name + or args.operation_type + or method + ), + ) + ) + + operation_body = operation_variant.get("body") + microversion: str | None = None + mod_suffix: str = "" + request_types = None + if operation_body: + min_ver = operation_body.get("x-openstack", {}).get("min-ver") + if min_ver: + mod_suffix = "_" + min_ver.replace(".", "") + microversion = min_ver + + (_, request_types) = openapi_parser.parse( + operation_body, ignore_read_only=True + ) + + # Certain hacks + for parsed_type in list(request_types): + # iterate over listed request_types since we want to modify list + if resource_name == 
"server" and method.lower() == "post": + # server declares OS-SCH-HNT:scheduler_hints as + # "alias" for normal scheduler hints, but the whole + # struct is there. For the cli it makes no sense and + # we filter it out from the parsed data + object_to_remove = "OS-SCH-HNT:scheduler_hints" + if ( + parsed_type.reference + and parsed_type.reference.name == object_to_remove + and parsed_type.reference.type == model.Struct + ): + request_types.remove(parsed_type) + elif parsed_type.reference is None and isinstance( + parsed_type, model.Struct + ): + parsed_type.fields.pop(object_to_remove, None) + + # and feed them into the TypeManager + type_manager.set_models(request_types) + + sdk_mod_path: list[str] = sdk_mod_path_base.copy() + sdk_mod_path.append((args.sdk_mod_name or mod_name) + mod_suffix) + mod_name += mod_suffix + + result_def: dict = {} + response_def: dict | None = {} + resource_header_metadata: dict = {} + + # Process response information + # # Prepare information about response + if method.upper() != "HEAD": + response = common.find_response_schema( + spec["responses"], + args.response_key or resource_name, + ( + args.operation_name + if args.operation_type == "action" + else None + ), + ) + + if response: + response_def, _ = common.find_resource_schema( + response, + None, + args.response_key or resource_name, + ) + + if response_def: + if response_def.get("type", "object") == "object" or ( + # BS metadata is defined with type: ["object", + # "null"] + isinstance(response_def.get("type"), list) + and "object" in response_def["type"] + ): + (_, response_types) = openapi_parser.parse( + response_def + ) + response_type_manager.set_models(response_types) + if method == "patch" and not request_types: + # image patch is a jsonpatch based operation + # where there is no request. 
For it we need to + # look at the response and get writable + # parameters as a base + is_json_patch = True + if not args.find_implemented_by_sdk: + raise NotImplementedError + additional_imports.update( + [ + "json_patch::{Patch, diff}", + "serde_json::json", + ] + ) + (_, response_types) = openapi_parser.parse( + response_def, ignore_read_only=True + ) + type_manager.set_models(response_types) + + elif response_def["type"] == "string": + (root_dt, _) = openapi_parser.parse(response_def) + if not root_dt: + raise RuntimeError( + "Response data can not be processed" + ) + field = common_rust.StructField( + local_name="dummy", + remote_name="dummy", + data_type=response_type_manager.convert_model( + root_dt + ), + is_optional=False, + ) + tuple_struct = TupleStruct(name="Response") + tuple_struct.tuple_fields.append(field) + response_type_manager.refs[ + model.Reference(name="Body", type=TupleStruct) + ] = tuple_struct + elif ( + response_def["type"] == "array" + and "items" in response_def + ): + (_, response_types) = openapi_parser.parse( + response_def["items"] + ) + response_type_manager.set_models(response_types) + + response_props = response.get("properties", {}) + if ( + response_props + and response_props[ + list(response_props.keys())[0] + ].get("type") + == "array" + ): + result_is_list = True + + root_type = response_type_manager.get_root_data_type() + + mod_import_name = "openstack_sdk::api::" + "::".join( + f"r#{x}" if x in ["type"] else x for x in sdk_mod_path + ) + + if not ( + args.find_implemented_by_sdk + and args.operation_type + in [ + "show", + "download", + ] + ): + additional_imports.add(mod_import_name) + + if args.find_implemented_by_sdk and args.operation_type in [ + "show", + "set", + "download", + ]: + additional_imports.add("openstack_sdk::api::find") + additional_imports.add( + "::".join( + [ + "openstack_sdk::api", + "::".join( + f"r#{x}" if x in ["type"] else x + for x in sdk_mod_path[:-1] + ), + "find", + ] + ) + ) + + if 
args.operation_type == "list": + # Make plural form for listing + target_class_name = common.get_plural_form( + target_class_name + ) + if "limit" in [ + k for (k, _) in type_manager.get_parameters("query") + ]: + is_list_paginated = True + additional_imports.add( + "openstack_sdk::api::{paged, Pagination}" + ) + if args.operation_type == "download": + additional_imports.add("crate::common::download_file") + + if args.operation_type == "upload": + additional_imports.add( + "crate::common::build_upload_asyncread" + ) + if ( + ( + isinstance(root_type, StructResponse) + and root_type.fields + ) + or ( + isinstance(root_type, TupleStruct) + and root_type.tuple_fields + ) + or (isinstance(root_type, common_rust.Dictionary)) + ): + additional_imports.add("openstack_sdk::api::QueryAsync") + else: + additional_imports.add("openstack_sdk::api::RawQueryAsync") + additional_imports.add("http::Response") + additional_imports.add("bytes::Bytes") + + if isinstance(root_type, StructResponse): + additional_imports.add("structable_derive::StructTable") + + if resource_header_metadata: + additional_imports.add( + "crate::common::HashMapStringString" + ) + additional_imports.add("std::collections::HashMap") + if ( + len( + [ + x + for x in resource_header_metadata.keys() + if "*" in x + ] + ) + > 0 + ): + additional_imports.add("regex::Regex") + + for st in response_type_manager.get_subtypes(): + if isinstance(st, StructResponse) or getattr( + st, "base_type", None + ) in ["vec", "dict"]: + additional_imports.add("std::fmt") + break + + if is_image_download: + additional_imports.add("openstack_sdk::api::find") + additional_imports.add("openstack_sdk::api::QueryAsync") + additional_imports.add( + "::".join( + [ + "openstack_sdk::api", + "::".join(sdk_mod_path[:-2]), + "find", + ] + ) + ) + # Discard unnecessry imports + additional_imports.discard("http::Response") + additional_imports.discard("bytes::Bytes") + + additional_imports.update(type_manager.get_imports()) + 
additional_imports.update(response_type_manager.get_imports()) + # Deserialize is already in template since it is uncoditionally required + additional_imports.discard("serde::Deserialize") + additional_imports.discard("serde::Serialize") + + command_description: str = spec.get("description") + command_summary: str = spec.get("summary") + if args.operation_type == "action": + command_description = operation_body.get( + "description", command_description + ) + command_summary = operation_body.get( + "summary", command_summary + ) + + if command_summary and microversion: + command_summary += f" (microversion = {microversion})" + if not command_description: + command_description = ( + "Command without description in OpenAPI" + ) + context = dict( + operation_id=operation_id, + operation_type=args.operation_type, + command_description=common_rust.sanitize_rust_docstrings( + command_description + ), + command_summary=common_rust.sanitize_rust_docstrings( + command_summary + ), + type_manager=type_manager, + resource_name=resource_name, + response_type_manager=response_type_manager, + target_class_name="".join( + x.title() for x in target_class_name.split("_") + ), + sdk_struct_name="Request", + sdk_service_name=common.get_rust_service_type_from_str( + args.service_type + ), + service_type=args.service_type, + url=path[1:] if path.startswith("/") else path, + method=method, + resource_key=None, + resource_header_metadata=resource_header_metadata, + sdk_mod_path=sdk_mod_path, + cli_mod_path=cli_mod_path, + result_def=result_def, + # Last path param is required for the download operation + last_path_parameter=last_path_parameter, + body_types=body_types, + additional_imports=additional_imports, + find_present=args.find_implemented_by_sdk, + microversion=microversion, + result_is_list=result_is_list, + is_image_download=is_image_download, + is_json_patch=is_json_patch, + is_list_paginated=is_list_paginated, + ) + + if not args.cli_mod_path: + # mod_name = args.operation_name 
or args.operation_type.value + impl_path = Path( + work_dir, "/".join(cli_mod_path), f"{mod_name}.rs" + ) + + self._render_command( + context, + "rust_cli/impl.rs.j2", + impl_path, + ) + + self._format_code(impl_path) + + yield (cli_mod_path, mod_name, path) diff --git a/codegenerator/rust_sdk.py b/codegenerator/rust_sdk.py new file mode 100644 index 0000000..cbf9be8 --- /dev/null +++ b/codegenerator/rust_sdk.py @@ -0,0 +1,572 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import logging +from pathlib import Path +import re +import subprocess +from typing import Type, Any + +from codegenerator.base import BaseGenerator +from codegenerator import common +from codegenerator import model +from codegenerator.common import BaseCompoundType +from codegenerator.common import rust as common_rust + + +class String(common_rust.String): + lifetimes: set[str] = set(["'a"]) + type_hint: str = "Cow<'a, str>" + + @property + def imports(self) -> set[str]: + return set(["std::borrow::Cow"]) + + +class Enum(common_rust.Enum): + @property + def builder_macros(self): + macros: set[str] = set(["setter(into)"]) + return macros + + @property + def builder_container_macros(self): + return "" + + @property + def serde_container_macros(self): + return "#[serde(untagged)]" + + @property + def derive_container_macros(self): + return "#[derive(Debug, Deserialize, Clone, Serialize)]" + + def get_sample(self): + (first_kind_name, first_kind_val) = list(sorted(self.kinds.items()))[0] + res = ( 
+ self.name + + "::" + + first_kind_name + + "(" + + first_kind_val.data_type.get_sample() + + ( + ".into()" + if isinstance(first_kind_val.data_type, String) + else "" + ) + + ")" + ) + return res + + +class StructField(common_rust.StructField): + @property + def builder_macros(self): + macros: set[str] = set([]) + if not isinstance(self.data_type, BaseCompoundType): + macros.update(self.data_type.builder_macros) + elif not isinstance(self.data_type, common_rust.StringEnum): + macros.add("setter(into)") + if "private" in macros: + macros.add(f'setter(name="_{self.local_name}")') + if self.is_optional: + default_set: bool = False + for macro in macros: + if "default" in macro: + default_set = True + break + if not default_set: + macros.add("default") + return f"#[builder({', '.join(sorted(macros))})]" + + @property + def serde_macros(self): + macros = set([]) + if self.local_name != self.remote_name: + macros.add(f'rename="{self.remote_name}"') + if self.is_optional: + macros.add('skip_serializing_if = "Option::is_none"') + return f"#[serde({', '.join(sorted(macros))})]" + + +class Struct(common_rust.Struct): + # field_type_class_ = StructField + field_type_class_: Type[StructField] | StructField = StructField + + @property + def builder_macros(self): + return set() + + @property + def derive_container_macros(self): + return "#[derive(Builder, Debug, Deserialize, Clone, Serialize)]" + + @property + def builder_container_macros(self): + return "#[builder(setter(strip_option))]" + + @property + def serde_container_macros(self): + return "" + + @property + def static_lifetime(self): + """Return Rust `<'lc>` lifetimes representation""" + return f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "" + + def get_sample(self): + res = [self.name + "Builder::default()"] + for field in sorted(self.fields.values(), key=lambda d: d.local_name): + if not field.is_optional: + data = f".{field.local_name}(" + data += field.data_type.get_sample() + data += ")" + 
res.append(data) + res.append(".build().unwrap()") + return "".join(res) + + def get_mandatory_init(self): + res = [] + for field in self.fields.values(): + if not isinstance(field.data_type, common_rust.Null): + if not field.is_optional: + el = field.data_type.get_sample() + if el: + data = f".{field.local_name}(" + data += el + data += ")" + res.append(data) + return "".join(res) + + +class BTreeMap(common_rust.Dictionary): + builder_macros: set[str] = set(["private"]) + requires_builder_private_setter: bool = True + + @property + def type_hint(self): + return f"BTreeMap<Cow<'a, str>, {self.value_type.type_hint}>" + + @property + def imports(self): + imports = set(["std::collections::BTreeMap"]) + imports.update(self.value_type.imports) + return imports + + @property + def lifetimes(self): + lt = set(["'a"]) + if self.value_type.lifetimes: + lt.update(self.value_type.lifetimes) + return lt + + def get_sample(self): + if isinstance(self.value_type, common_rust.Option): + return ( + "BTreeMap::<String, Option<String>>::new().into_iter()" + ".map(|(k, v)| (k, v.map(Into::into)))" + ) + else: + return "BTreeMap::<String, String>::new().into_iter()" + + def get_mandatory_init(self): + return "" + + +class BTreeSet(common_rust.BTreeSet): + builder_macros: set[str] = set(["private"]) + requires_builder_private_setter: bool = True + + +class CommaSeparatedList(common_rust.CommaSeparatedList): + @property + def builder_macros(self): + return set() + + @property + def imports(self): + imports: set[str] = set([]) + imports.add("crate::api::common::CommaSeparatedList") + imports.update(self.item_type.imports) + return imports + + +class RequestParameter(common_rust.RequestParameter): + """OpenAPI request parameter in the Rust SDK form""" + + @property + def builder_macros(self): + macros = self.data_type.builder_macros + macros.add("default") + if self.setter_name: + macros.add(f'setter(name="_{self.setter_name}")') + macros.add("private") + macros.discard("setter(into)") + 
return f"#[builder({', '.join(sorted(macros))})]" + + +class TypeManager(common_rust.TypeManager): + """Rust SDK type manager + + The class is responsible for converting ADT models into types suitable + for Rust (SDK). + + """ + + primitive_type_mapping: dict[Type[model.PrimitiveType], Type[Any]] = { + model.PrimitiveString: String, + model.ConstraintString: String, + } + + data_type_mapping = { + model.Dictionary: BTreeMap, + model.Enum: Enum, + model.Struct: Struct, + model.CommaSeparatedList: CommaSeparatedList, + } + + request_parameter_class: Type[common_rust.RequestParameter] = ( + RequestParameter + ) + + def set_parameters(self, parameters: list[model.RequestParameter]) -> None: + """Set OpenAPI operation parameters into typemanager for conversion""" + super().set_parameters(parameters) + for k, param in self.parameters.items(): + if isinstance(param.data_type, common_rust.CommaSeparatedList): + param.setter_name = param.local_name + param.setter_type = "csv" + elif isinstance(param.data_type, common_rust.BTreeSet): + param.setter_name = param.local_name + param.setter_type = "set" + elif isinstance(param.data_type, common_rust.Array): + param.setter_name = param.local_name + param.setter_type = "list" + self.parameters[k] = param + + +class RustSdkGenerator(BaseGenerator): + def __init__(self): + super().__init__() + + def _format_code(self, *args): + """Format code using Rustfmt + + :param *args: Path to the code to format + """ + for path in args: + subprocess.run(["rustfmt", "--edition", "2021", path]) + + def get_parser(self, parser): + parser.add_argument( + "--response-key", + help="Rust SDK response key (only required when normal detection does not work)", + ) + + parser.add_argument( + "--response-list-item-key", + help='Rust SDK list response item key (specifies whether list items are wrapped in additional container `{"keypairs":["keypair":{}]}`)', + ) + + return parser + + def _render_command( + self, + context: dict, + impl_template: str, + 
impl_dest: Path, + ): + """Render command code""" + self._render(impl_template, context, impl_dest.parent, impl_dest.name) + + def generate( + self, res, target_dir, openapi_spec=None, operation_id=None, args=None + ): + """Generate code for the Rust openstack_sdk""" + logging.debug( + "Generating Rust SDK code for %s in %s [%s]", + operation_id, + target_dir, + args, + ) + + if not openapi_spec: + openapi_spec = common.get_openapi_spec(args.openapi_yaml_spec) + if not operation_id: + operation_id = args.openapi_operation_id + (path, method, spec) = common.find_openapi_operation( + openapi_spec, operation_id + ) + if args.operation_type == "find": + yield self.generate_find_mod( + target_dir, + args.sdk_mod_path.split("::"), + res.split(".")[-1], + args.name_field, + args.list_mod, + openapi_spec, + path, + method, + spec, + args.name_filter_supported, + ) + return + + # srv_name, res_name = res.split(".") if res else (None, None) + path_resources = common.get_resource_names_from_url(path) + res_name = path_resources[-1] + + mime_type = None + openapi_parser = model.OpenAPISchemaParser() + operation_params: list[model.RequestParameter] = [] + type_manager: TypeManager | None = None + is_json_patch: bool = False + # Collect all operation parameters + for param in openapi_spec["paths"][path].get( + "parameters", [] + ) + spec.get("parameters", []): + if ( + ("{" + param["name"] + "}") in path and param["in"] == "path" + ) or param["in"] != "path": + # Respect path params that appear in path and not path params + param_ = openapi_parser.parse_parameter(param) + if param_.name == f"{res_name}_id": + path = path.replace(f"{res_name}_id", "id") + # for i.e. 
routers/{router_id} we want local_name to be `id` and not `router_id` + param_.name = "id" + operation_params.append(param_) + + # Process body information + # List of operation variants (based on the body) + operation_variants = common_rust.get_operation_variants( + spec, args.operation_name + ) + + for operation_variant in operation_variants: + logging.debug("Processing variant %s" % operation_variant) + # TODO(gtema): if we are in MV variants filter out unsupported query + # parameters + # TODO(gtema): previously we were ensuring `router_id` path param + # is renamed to `id` + + class_name = res_name.title() + operation_body = operation_variant.get("body") + type_manager = TypeManager() + type_manager.set_parameters(operation_params) + mod_name = "_".join( + x.lower() + for x in re.split( + common.SPLIT_NAME_RE, + ( + args.module_name + or args.operation_name + or args.operation_type.value + or method + ), + ) + ) + + if operation_body: + min_ver = operation_body.get("x-openstack", {}).get("min-ver") + if min_ver: + mod_name += "_" + min_ver.replace(".", "") + # There is request body. Get the ADT from jsonschema + # if args.operation_type != "action": + (_, all_types) = openapi_parser.parse( + operation_body, ignore_read_only=True + ) + # and feed them into the TypeManager + type_manager.set_models(all_types) + # else: + # logging.warn("Ignoring response type of action") + + if method == "patch": + # There might be multiple supported mime types. 
We only select ones we are aware of + mime_type = operation_variant.get("mime_type") + if not mime_type: + raise RuntimeError( + "No supported mime types for patch operation found" + ) + if mime_type != "application/json": + is_json_patch = True + + mod_path = common.get_rust_sdk_mod_path( + args.service_type, + args.api_version, + args.alternative_module_path or path, + ) + + response_def = None + response_key = None + # Get basic information about response + if method.upper() != "HEAD": + for code, rspec in spec["responses"].items(): + if not code.startswith("2"): + continue + content = rspec.get("content", {}) + if "application/json" in content: + response_spec = content["application/json"] + try: + ( + response_def, + response_key, + ) = common.find_resource_schema( + response_spec["schema"], + None, + resource_name=res_name.lower(), + ) + except Exception: + # Most likely we have response which is oneOf. + # For the SDK it does not really harm to ignore + # this. + pass + # response_def = (None,) + response_key = None + + context = dict( + operation_id=operation_id, + operation_type=spec.get( + "x-openstack-operation-type", args.operation_type + ), + command_description=common.make_ascii_string( + spec.get("description") + ), + class_name=class_name, + sdk_service_name=common.get_rust_service_type_from_str( + args.service_type + ), + url=path[1:] if path.startswith("/") else path, + method=method, + type_manager=type_manager, + response_key=args.response_key or response_key, + response_list_item_key=args.response_list_item_key, + mime_type=mime_type, + is_json_patch=is_json_patch, + ) + + work_dir = Path(target_dir, "rust", "openstack_sdk", "src") + impl_path = Path( + work_dir, + "api", + "/".join(mod_path), + f"{mod_name}.rs", + ) + + # Generate methods for the GET resource command + self._render_command( + context, + "rust_sdk/impl.rs.j2", + impl_path, + ) + + self._format_code(impl_path) + + yield (mod_path, mod_name, path) + + def generate_mod( + self, 
target_dir, mod_path, mod_list, url, resource_name, service_name + ): + """Generate collection module (include individual modules)""" + work_dir = Path(target_dir, "rust", "openstack_sdk", "src") + impl_path = Path( + work_dir, + "api", + "/".join(mod_path[0:-1]), + f"{mod_path[-1]}.rs", + ) + + context = dict( + mod_list=mod_list, + mod_path=mod_path, + url=url, + resource_name=resource_name, + service_name=service_name, + ) + + # Generate methods for the GET resource command + self._render_command( + context, + "rust_sdk/mod.rs.j2", + impl_path, + ) + + self._format_code(impl_path) + + def generate_find_mod( + self, + target_dir, + mod_path, + resource_name, + name_field: str, + list_mod: str, + openapi_spec, + path: str, + method: str, + spec, + name_filter_supported: bool = False, + ): + """Generate `find` operation module""" + work_dir = Path(target_dir, "rust", "openstack_sdk", "src") + impl_path = Path( + work_dir, + "api", + "/".join(mod_path), + "find.rs", + ) + # Collect all operation parameters + openapi_parser = model.OpenAPISchemaParser() + path_resources = common.get_resource_names_from_url(path) + res_name = path_resources[-1] + operation_path_params: list[model.RequestParameter] = [] + operation_query_params: list[model.RequestParameter] = [] + + for param in openapi_spec["paths"][path].get( + "parameters", [] + ) + spec.get("parameters", []): + if ("{" + param["name"] + "}") in path and param["in"] == "path": + # Respect path params that appear in path and not in path params + param_ = openapi_parser.parse_parameter(param) + if param_.name == f"{res_name}_id": + path = path.replace(f"{res_name}_id", "id") + # for i.e. 
routers/{router_id} we want local_name to be `id` and not `router_id` + param_.name = "id" + operation_path_params.append(param_) + if param["in"] == "query": + # Capture query params to estimate lifetime of the operation + operation_query_params.append(param) + type_manager = TypeManager() + type_manager.set_parameters(operation_path_params) + + context = dict( + mod_path=mod_path, + resource_name=resource_name, + list_mod=list_mod, + name_filter_supported=name_filter_supported, + name_field=name_field, + type_manager=type_manager, + list_lifetime=( + "<'a>" + if operation_query_params or operation_path_params + else "" + ), + ) + + # Generate methods for the GET resource command + self._render_command( + context, + "rust_sdk/find.rs.j2", + impl_path, + ) + + self._format_code(impl_path) + + return (mod_path, "find", "dummy") diff --git a/codegenerator/templates/ansible/impl_mod.py.j2 b/codegenerator/templates/ansible/impl_mod.py.j2 new file mode 100644 index 0000000..67e383a --- /dev/null +++ b/codegenerator/templates/ansible/impl_mod.py.j2 @@ -0,0 +1,136 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +DOCUMENTATION = ''' +--- +module: {{ target_name }} +description: Manage {{ registry_name }} objects within OpenStack. +options: + name: + description: The resource name or id. 
+ type: str +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} + {{ k }}: + description: {{ v.get('docs', '') }} + type: {% if attr.type.__name__ == "str" or attr.type is none -%} + str +{%- else -%} + {{ attr.type.__name__ }} +{%- endif %} +{%- endfor %} +extends_documentation_fragment: + - openstack.cloud.openstack +''' + +EXAMPLES = ''' +- name: Create {{ registry_name }} object + openstack.cloud.{{ target_name }}: +{%- for (k, v) in attrs.items() if not v["attr"].read_only %} + {{ k }}: {{ fake_resource[k] }} +{%- endfor %} + register: obj +''' + +RETURN = ''' +{{ target_name }}: + description: | + Dictionary describing the identified (and possibly modified) OpenStack cloud resource. + returned: On success when I(state) is C(present). + type: dict + contains: +{%- for (k, v) in attrs.items() %} +{%- set attr=v['attr'] %} + {{ k }}: +{%- if v['docs'] and v['docs']|length > 0 %} + description: "{{ v.get('docs', '') }}" +{%- else %} + description: "{{ k }}" +{%- endif %} {# if docs #} +{%- if attr.type.__name__ == "str" or attr.type is none %} + type: str +{%- else %} + type: {{ attr.type.__name__ }} +{%- endif %} {# if attr.type #} +{%- endfor %} +''' + +from ansible_collections.openstack.cloud.plugins.module_utils.openstack import \ + OpenStackModule + + +class {{ ansible_module_name }}Module(OpenStackModule): + + argument_spec = dict( + name=dict(), +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} + {{ k }}=dict( +{%- if attr.type.__name__ == "str" or attr.type is none -%} + type='str' +{%- else %} + type='{{ attr.type.__name__ }}' +{%- endif %} + ), +{%- endfor %} + + ) + + module_kwargs = dict( + supports_check_mode=True + ) + + def run(self): + sm = StateMachine( + connection=self.conn, + service_name="{{ sdk_service_name }}", + type_name="{{ resource_name }}", + sdk=self.sdk + ) + + kwargs = dict( + (k, self.params[k]) + for k in [ + "state", + 
"timeout", + "wait", + ] + ) + + kwargs["attributes"] = dict( +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} + {{ k }}=self.params['{{ k }}'], +{%- endfor %} + ) + + kwargs["non_updateable_attributes"] = [ +{%- for (k, v) in attrs.items() if v["attr"].create_only or v["attr"].read_only %} + {{ k }}, +{%- endfor %} + ] + kwargs["updateable_attributes"] = [ +{%- for (k, v) in attrs.items() if not v["attr"].read_only %} + {{ k }}, +{%- endfor %} + ] + + resource, is_changed = sm(check_mode=self.ansible.check_mode, **kwargs) + + if resource is None: + self.exit_json(changed=is_changed) + else: + self.exit_json(changed=is_changed, + resource=resource.to_dict(computed=False)) + + +def main(): + module = {{ ansible_module_name }}Module() + module() + + +if __name__ == '__main__': + main() + diff --git a/codegenerator/templates/ansible/impl_mod_info.py.j2 b/codegenerator/templates/ansible/impl_mod_info.py.j2 new file mode 100644 index 0000000..f208463 --- /dev/null +++ b/codegenerator/templates/ansible/impl_mod_info.py.j2 @@ -0,0 +1,92 @@ +#!/usr/bin/python +# -*- coding: utf-8 -*- + +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + +DOCUMENTATION = ''' +--- +module: {{ target_name }}_info +description: Retrieve information about {{ registry_name }} objects within OpenStack. +options: + name: + description: The resource name or id. + type: str + filters: + description: + - A dictionary of meta data to use for further filtering. Elements + of this dictionary will be matched passed to the API as query + parameter filters. + type: dict +extends_documentation_fragment: + - openstack.cloud.openstack +''' + +EXAMPLES = ''' +- name: List all {{ registry_name }} objects + openstack.cloud.{{ target_name }}_info: + register: objects +''' + +RETURN = ''' +{{ target_name }}s: + description: | + List of {{ target_name }} objects dictionaries. 
A subset of the + dictionary keys listed below may be returned, depending on your cloud + provider. + returned: always + type: list + elements: dict + contains: +{%- for (k, v) in attrs.items() %} +{%- set attr=v['attr'] %} + {{ k }}: +{%- if v['docs'] and v['docs']|length > 0 %} + description: "{{ v.get('docs', '') }}" +{%- else %} + description: "{{ k }}" +{%- endif %} {# if docs #} +{%- if attr.type.__name__ == "str" or attr.type is none %} + type: str +{%- else %} + type: {{ attr.type.__name__ }} +{%- endif %} {# if attr.type #} +{%- endfor %} + +''' + +from ansible_collections.openstack.cloud.plugins.module_utils.openstack import \ + OpenStackModule + + +class {{ ansible_module_name }}InfoModule(OpenStackModule): + + argument_spec = dict( + name=dict(), + filters=dict(type='dict'), + ) + + module_kwargs = dict( + supports_check_mode=True + ) + + def run(self): + data = [ + v.to_dict(computed=False) + for v in self.conn.search_resources( + resource_type="{{ registry_name }}", + name_or_id=self.params['name'], + filters=self.params['filters'] + ) + ] + + self.exit_json(changed=False, {{ target_name }}s=data) + + +def main(): + module = {{ ansible_module_name }}InfoModule() + module() + + +if __name__ == '__main__': + main() + diff --git a/codegenerator/templates/ansible/test_playbook.yaml.j2 b/codegenerator/templates/ansible/test_playbook.yaml.j2 new file mode 100644 index 0000000..d979469 --- /dev/null +++ b/codegenerator/templates/ansible/test_playbook.yaml.j2 @@ -0,0 +1,55 @@ +--- +- module_defaults: + group/openstack.cloud.openstack: + cloud: " {{ '{{ cloud }}' }}" + # Listing modules individually is required for + # backward compatibility with Ansible 2.9 only + openstack.cloud.{{ target_name }}: + cloud: " {{ '{{ cloud }}' }}" + openstack.cloud.{{ target_name }}_info: + cloud: " {{ '{{ cloud }}' }}" + block: + - name: List {{ registry_name }} + openstack.cloud.{{ target_name }}_info: + register: objects + + - name: Assert return values of module + 
ansible.builtin.assert: + that: + - objects is not changed + + - name: Create {{ registry_name }} object + openstack.cloud.{{ target_name }}: +{%- for (k, v) in attrs.items() if not v["attr"].read_only %} + {{ k }}: {{ fake_resource[k] }} +{%- endfor %} + register: obj + + - name: Check created object + ansible.builtin.assert: + that: +{%- for (k, v) in attrs.items() if not v["attr"].read_only %} + - obj.resource.{{ k }} == {{ fake_resource[k] }} +{%- endfor %} + + - name: Delete {{ registry_name }} object + openstack.cloud.{{ target_name }}: + name: {{ fake_resource['name'] }} + state: absent + register: obj + + - name: Assert return values of module + ansible.builtin.assert: + that: + - obj is changed + + - name: Delete {{ registry_name }} object again + openstack.cloud.{{ target_name }}: + name: {{ fake_resource['name'] }} + state: absent + register: obj + + - name: Assert return values of module + ansible.builtin.assert: + that: + - obj is not changed diff --git a/codegenerator/templates/osc/impl_common.py.j2 b/codegenerator/templates/osc/impl_common.py.j2 new file mode 100644 index 0000000..b107fd0 --- /dev/null +++ b/codegenerator/templates/osc/impl_common.py.j2 @@ -0,0 +1,37 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +"""{{class_name}} implementations""" + +import logging + +from osc_lib.cli import format_columns # noqa +from osc_lib import exceptions # noqa +from osc_lib import utils + +LOG = logging.getLogger(__name__) + +# TODO(autogen): add required formatters +_formatters = { +} + + +def _get_resource_columns(item): + # TODO(autogen): Verify column renamings + column_map = { + + } + hidden_columns = ['links', 'location', 'original_name'] + return utils.get_osc_show_columns_for_sdk_resource( + item, column_map, hidden_columns) + diff --git a/codegenerator/templates/osc/impl_create.py.j2 b/codegenerator/templates/osc/impl_create.py.j2 new file mode 100644 index 0000000..2ebf901 --- /dev/null +++ b/codegenerator/templates/osc/impl_create.py.j2 @@ -0,0 +1,114 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +"""{{class_name}} implementations""" + +import logging + +from openstack import utils as sdk_utils +from osc_lib.command import command +from osc_lib import exceptions +from osc_lib.cli import parseractions # noqa +from osc_lib import utils + +from openstackclient.i18n import _ +from {{ osc_mod_name }} import common + +LOG = logging.getLogger(__name__) + +# TODO(autogen): add required formatters +_formatters = common._formatters + + +class Create{{ class_name }}(command.ShowOne): + _description = _("Create new {{ class_name }}") + + def get_parser(self, prog_name): + parser = super().get_parser(prog_name) + + parser.add_argument( + "name", + metavar="<{{ resource_name }}-name>", + help=_( + "New {{ resource_name }} name" + ), + ) +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} + parser.add_argument( + "--{{ k | replace('is_', '') | replace('_', '-') }}", +{%- if attr.type.__name__ == "dict" %} + metavar='<key=value>', + action=parseractions.KeyValueAction, +{%- elif attr.type.__name__ == "bool" %} + action='store_true', +{%- elif attr.type.__name__ == "str" or attr.type is none %} + metavar="<{{ k }}>", +{%- elif attr.type.__name__ == "list" %} + metavar="<{{ k }}>", + action='append', + default=[], + dest='{{ k }}', +{%- endif %} {#- if attr.type == dict #} +{%- if attr.required_on_create %} + required=True, +{%- endif %} {#- if attr.required_on_create #} + help=_( +{%- if v['docs'] and v['docs']|length > 0 %} +{#- wrap long line with identation #} + "{{ v.get('docs', '') | wordwrap(59) | replace('\n', ' \"\n \"') }}" +{%- elif v['doc']|length == 0 %} + "" +{%- endif %} +{%- if attr.type.__name__ == "dict" %} + "(repeat option to set multiple properties)" +{%- elif attr.type.__name__ == "list" %} + "(repeat option to set multiple entries)" +{%- endif %} {#- if is dict #} + ) + ) +{%- endfor %} {#- for k,_ in attr.items #} + + return parser + + def take_action(self, parsed_args): + client = 
self.app.client_manager.sdk_connection.{{ sdk_service_name }} + + args = { +{%- for (k, v) in attrs.items() if not (v["attr"].read_only or v["attr"].min_microversion) %} + "{{ k }}": parsed_args.{{ k | replace('is_', '') }}, +{%- endfor %} {#- for k,_ in attr.items #} + } + +{%- for (k, v) in attrs.items() if v["attr"].min_microversion -%} +{%- set param_name = k.replace("is_", "") %} +{%- set min_mv = v["attr"].min_microversion %} + if parsed_args.{{ param_name }}: + if not sdk_utils.supports_microversion(client, "{{ min_mv }}"): + msg = _( + 'The --{{ param_name }} parameter requires server support for ' + 'API microversion {{ min_mv }}' + ) + raise exceptions.CommandError(msg) + args["{{ k }}"] = parsed_args.{{ param_name }} + +{%- endfor %} + + data = client.create_{{ resource_name }}(**args) + + display_columns, columns = common._get_resource_columns(data) + data = utils.get_dict_properties( + data, columns, formatters=common._formatters) + + return (display_columns, data) + diff --git a/codegenerator/templates/osc/impl_delete.py.j2 b/codegenerator/templates/osc/impl_delete.py.j2 new file mode 100644 index 0000000..99ffb24 --- /dev/null +++ b/codegenerator/templates/osc/impl_delete.py.j2 @@ -0,0 +1,66 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +"""delete {{class_name}} implementations""" + +import logging + +from osc_lib.command import command +from osc_lib import exceptions + +from openstackclient.i18n import _ + +LOG = logging.getLogger(__name__) + + +class Delete{{ class_name }}(command.Command): + _description = _("Delete {{ class_name }}(s)") + + def get_parser(self, prog_name): + parser = super().get_parser(prog_name) + + parser.add_argument( + '{{ resource_name }}', + metavar="<{{ resource_name }}>", + nargs="+", + help=_("{{ class_name }} to delete (name or ID)") + ) + + return parser + + def take_action(self, parsed_args): + client = self.app.client_manager.sdk_connection + + result = 0 + entities = parsed_args.{{ resource_name }} + for item in entities: + try: + items = client.search_resources( + resource_type="{{ registry_name }}", + name_or_id=item + ) + if len(items) == 1: + LOG.debug( + "Deleting {{ class_name }} with id=%s" % items[0].id) + client.{{ sdk_service_name }}.delete_{{ resource_name }}(items[0].id) + + except Exception as e: + result += 1 + LOG.error(_("Failed to delete {{ resource_name }} with name or " + "ID '%(item)s': %(e)s"), {'item': item, 'e': e}) + if result > 0: + total = len(entities) + msg = (_("%(result)s of %(total)s {{ resource_name }}s failed " + "to delete.") % {'result': result, 'total': total}) + raise exceptions.CommandError(msg) + diff --git a/codegenerator/templates/osc/impl_list.py.j2 b/codegenerator/templates/osc/impl_list.py.j2 new file mode 100644 index 0000000..14b18f3 --- /dev/null +++ b/codegenerator/templates/osc/impl_list.py.j2 @@ -0,0 +1,130 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +"""{{class_name}} implementations""" + +import logging + +from openstack import utils as sdk_utils +from osc_lib.command import command +from osc_lib import exceptions +from osc_lib import utils + +from openstackclient.i18n import _ +from {{ osc_mod_name }} import common + +LOG = logging.getLogger(__name__) + +# TODO(autogen): add required formatters +_formatters = common._formatters + + +class List{{ class_name }}(command.Lister): + _description = _("List {{ class_name }}s") + + def get_parser(self, prog_name): + parser = super().get_parser(prog_name) + +{%- for (k, _) in res._query_mapping._mapping.items() %} +{% set attr = attrs.get(k) %} + parser.add_argument( +{%- if not k.startswith('is_') %} + "--{{k | replace('_', '-') }}", +{%- else -%} {# if not k.startswith #} + "--{{ k | replace('is_', '') }}", + action="store_true", +{%- endif -%} {# if not k.startswith #} + help=_( +{%- if attr is defined and attr['docs'] and attr['docs']|length > 0 %} +{#- wrap long line with identation #} + "{{ attr.get('docs', '') | wordwrap(59) | replace('\n', ' \"\n \"') }}" +{%- elif attr is defined and attr['doc']|length == 0 %} + "" +{%- elif k == "limit" %} + "The last {{ class_name}} ID of the previous page" +{%- elif k == "marker" %} + "Maximum amount of entries to fetch in single API request" +{%- else -%} + "" +{%- endif %} + ) + ) +{%- endfor %} {#- for k,_ in query_mapping #} + + return parser + + def take_action(self, parsed_args): + client = self.app.client_manager.sdk_connection + + # TODO(autogen): Define columns to show + columns = ( +{%- for (k, _) 
in attrs.items() %} + "{{ k }}", +{%- endfor %} + ) + + column_headers = ( +{%- for (k, _) in attrs.items() %} + "{{ k.capitalize() }}", +{%- endfor %} + ) + + query_attrs = dict() + + if parsed_args.limit or parsed_args.marker: + # User passed explicit pagination request, switch off SDK + # pagination + query_attrs['paginated'] = False + +{%- for (k, _) in res._query_mapping._mapping.items() %} +{% set attr = attrs.get(k) %} + +{%- if not k.startswith('is_') %} + if parsed_args.{{k}}: + +{#- Min microversion handling #} +{%- if k in attrs and attrs[k]['attr'].min_microversion %} +{%- set min_mv = attrs[k]['attr'].min_microversion %} + if not sdk_utils.supports_microversion(client, "{{ min_mv }}"): + msg = _( + "The --{{ k }} parameter requires server support for " + "API microversion {{ min_mv }}" + ) + raise exceptions.CommandError(msg) +{%- endif %} +{#- End Min microversion handling #} + query_attrs["{{k}}"] = parsed_args.{{ k }} +{%- else %} + if parsed_args.{{k | replace('is_', '') }}: + query_attrs["{{k}}"] = parsed_args.{{ k | replace('is_', '') }} +{%- endif %} +{%- endfor %} + + data = client.search_resources( + resource_type="{{registry_name}}", + name_or_id=None, + filters=query_attrs + ) + + headers, attrs = utils.calculate_header_and_attrs( + column_headers, columns, parsed_args) + return ( + headers, + ( + utils.get_item_properties( + s, attrs, + formatters=_formatters, + ) for s in data + ) + ) + diff --git a/codegenerator/templates/osc/impl_set.py.j2 b/codegenerator/templates/osc/impl_set.py.j2 new file mode 100644 index 0000000..1a1fd83 --- /dev/null +++ b/codegenerator/templates/osc/impl_set.py.j2 @@ -0,0 +1,119 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +"""{{class_name}} implementations""" + +import logging + +from openstack import utils as sdk_utils +from osc_lib.command import command +from osc_lib import exceptions +from osc_lib.cli import parseractions # noqa +from osc_lib import utils + +from openstackclient.i18n import _ +from {{ osc_mod_name }} import common + +LOG = logging.getLogger(__name__) + +# TODO(autogen): add required formatters +_formatters = common._formatters + + +class Set{{ class_name }}(command.ShowOne): + _description = _("Set {{ class_name }} properties") + + def get_parser(self, prog_name): + parser = super().get_parser(prog_name) + + parser.add_argument( + "{{ resource_name }}", + metavar="<{{ resource_name }}>", + help=_( + "{{ resource_name }} to update (name or ID)" + ), + ) +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} +{%- set attr=v['attr'] %} + parser.add_argument( + "--{{ k | replace('is_', '') | replace('_', '-') }}", +{%- if attr.type.__name__ == "dict" %} + metavar='<key=value>', + action=parseractions.KeyValueAction, +{%- elif attr.type.__name__ == "bool" %} + action='store_true', +{%- elif attr.type.__name__ == "str" or attr.type is none %} + metavar="<{{ k }}>", +{%- elif attr.type.__name__ == "list" %} + metavar="<{{ k }}>", + action='append', + default=[], + dest='{{ k }}', +{%- endif %} {#- if attr.type == dict #} + help=_( +{%- if v['docs'] and v['docs']|length > 0 %} +{#- wrap long line with identation #} + "{{ v.get('docs', '') | wordwrap(59) | replace('\n', ' \"\n \"') }}" +{%- elif v['doc']|length == 0 %} + 
"" +{%- endif %} +{%- if attr.type.__name__ == "dict" %} + "(repeat option to set multiple properties)" +{%- elif attr.type.__name__ == "list" %} + "(repeat option to set multiple entries)" +{%- endif %} {#- if is dict #} + ) + ) +{%- endfor %} {#- for k,_ in attr.items #} + + return parser + + def take_action(self, parsed_args): + client = self.app.client_manager.sdk_connection.{{ sdk_service_name }} + + args = { +{%- for (k, v) in attrs.items() if not (v["attr"].read_only or v["attr"].min_microversion or v["attr"].create_only) %} + "{{ k }}": parsed_args.{{ k | replace('is_', '') }}, +{%- endfor %} {#- for k,_ in attr.items #} + } + +{%- for (k, v) in attrs.items() if v["attr"].min_microversion -%} +{%- set param_name = k.replace("is_", "") %} +{%- set min_mv = v["attr"].min_microversion %} + if parsed_args.{{ param_name }}: + if not sdk_utils.supports_microversion(client, "{{ min_mv }}"): + msg = _( + 'The --{{ param_name }} parameter requires server support for ' + 'API microversion {{ min_mv }}' + ) + raise exceptions.CommandError(msg) + args["{{ k }}"] = parsed_args.{{ param_name }} + +{%- endfor %} + + resource = self.app.client_manager.sdk_connection.search_resources( + resource_type="{{ registry_name }}", + name_or_id=parsed_args.{{ resource_name }}, + filters=None + ) + + data = client.update_{{ resource_name }}( + resource.id, + **args) + + display_columns, columns = common._get_resource_columns(data) + data = utils.get_dict_properties( + data, columns, formatters=common._formatters) + + return (display_columns, data) + diff --git a/codegenerator/templates/osc/impl_show.py.j2 b/codegenerator/templates/osc/impl_show.py.j2 new file mode 100644 index 0000000..caaa240 --- /dev/null +++ b/codegenerator/templates/osc/impl_show.py.j2 @@ -0,0 +1,66 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +"""show {{class_name}} implementations""" + +import logging + +from osc_lib.command import command +from osc_lib import exceptions +from osc_lib import utils + +from openstackclient.i18n import _ +from {{ osc_mod_name }} import common + +LOG = logging.getLogger(__name__) + +_formatters = common._formatters + + +class Show{{ class_name }}(command.ShowOne): + _description = _("Show single {{ class_name }} details") + + def get_parser(self, prog_name): + parser = super().get_parser(prog_name) + + parser.add_argument( + '{{ resource_name }}', + metavar="<{{ resource_name }}>", + help=_("{{ class_name }} to display (name or ID)") + ) + + return parser + + def take_action(self, parsed_args): + client = self.app.client_manager.sdk_connection + + name_or_id = parsed_args.{{ resource_name }} + data = client.search_resources( + resource_type="{{ registry_name }}", + name_or_id=name_or_id, + filters=None + ) + if len(data) == 1: + data = data[0] + elif len(data) == 0: + raise exceptions.CommandError( + "No {{ resource_name }} with a name or ID of '%s' exists." 
+ % name_or_id + ) + + display_columns, columns = common._get_resource_columns(data) + data = utils.get_dict_properties( + data, columns, formatters=common._formatters) + + return (display_columns, data) + diff --git a/codegenerator/templates/osc/impl_unset.py.j2 b/codegenerator/templates/osc/impl_unset.py.j2 new file mode 100644 index 0000000..c7fc693 --- /dev/null +++ b/codegenerator/templates/osc/impl_unset.py.j2 @@ -0,0 +1,100 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +"""{{class_name}} implementations""" + +import logging + +from openstack import utils as sdk_utils +from osc_lib.command import command +from osc_lib import exceptions +from osc_lib.cli import parseractions # noqa +from osc_lib import utils + +from openstackclient.i18n import _ +from {{ osc_mod_name }} import common + +LOG = logging.getLogger(__name__) + +# TODO(autogen): add required formatters +_formatters = common._formatters + + +class Unset{{ class_name }}(command.ShowOne): + _description = _("Unset {{ class_name }} properties") + + def get_parser(self, prog_name): + parser = super().get_parser(prog_name) + + parser.add_argument( + "{{ resource_name }}", + metavar="<{{ resource_name }}>", + help=_( + "{{ resource_name }} to unset property from (name or ID)" + ), + ) + + parser.add_argument( + "--property", + metavar="<key>", + action="append", + default=[], + dest="properties", + help=_("Property to remove from {{ resource_name }} " + "(repeat option to remove multiple properties)") + 
) + + return parser + + def take_action(self, parsed_args): + client = self.app.client_manager.sdk_connection.{{ sdk_service_name }} + + unset_props = parsed_args.properties + new_props = {} +{%- for (k, v) in attrs.items() if not (v["attr"].read_only or v["attr"].create_only) %} + if "{{ k | replace('is_', '') }}" in unset_props: + unset_props.remove("{{ k | replace('is_', '') }}") +{%- if v["attr"].min_microversion %} +{%- set min_mv = attrs[k]['attr'].min_microversion %} + if not sdk_utils.supports_microversion(client, "{{ min_mv }}"): + msg = _( + "The --{{ k }} parameter requires server support for " + "API microversion {{ min_mv }}" + ) + raise exceptions.CommandError(msg) +{%- endif %} {# microversion #} + new_props["{{ k }}"] = None +{%- endfor %} {#- for k,_ in attr.items #} + + if unset_props: + msg = _( + "Properties %s are not supported by {{ resource_name }}" + ) + raise exceptions.CommandError(msg % unset_props) + + resource = self.app.client_manager.sdk_connection.search_resources( + resource_type="{{ registry_name }}", + name_or_id=parsed_args.{{ resource_name }}, + filters=None + ) + + data = client.update_{{ resource_name }}( + resource.id, + **new_props) + + display_columns, columns = common._get_resource_columns(data) + data = utils.get_dict_properties( + data, columns, formatters=common._formatters) + + return (display_columns, data) + diff --git a/codegenerator/templates/osc/test_unit_create.py.j2 b/codegenerator/templates/osc/test_unit_create.py.j2 new file mode 100644 index 0000000..51beaef --- /dev/null +++ b/codegenerator/templates/osc/test_unit_create.py.j2 @@ -0,0 +1,130 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +from unittest import mock + +from openstack import utils as sdk_utils +from {{ sdk_mod_name }} import {{ class_name }} +from openstack.tests.unit import fakes as sdk_fakes + +from {{ osc_mod_name }} import create +from openstackclient.tests.unit import utils as tests_utils + + +class Test{{ class_name }}(tests_utils.TestCommand): + + def setUp(self): + super().setUp() + + # SDK mock + self.app.client_manager.sdk_connection = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }} = mock.Mock() + self.sdk_client = self.app.client_manager.sdk_connection + self.app.client_manager.sdk_connection.{{ sdk_service_name }}.create_{{ resource_name }} = mock.Mock() + + +class Test{{ class_name }}Create(Test{{ class_name }}): + + _fake = sdk_fakes.get_fake_resource({{ class_name }}) + + columns = ( +{%- for (k, _) in attrs.items() %} + "{{ k }}", +{%- endfor %} + ) + + data = ( +{%- for (k, _) in attrs.items() %} + _fake.{{ k }}, +{%- endfor %} + ) + + def setUp(self): + super().setUp() + + self.api_mock = mock.Mock() + self.api_mock.side_effect = [self._fake ] + + self.sdk_client.{{ sdk_service_name }}.create_{{ resource_name }} = self.api_mock + + # Get the command object to test + self.cmd = create.Create{{ class_name }}(self.app, None) + + def test_create_no_options(self): + + arglist = [] + verifylist = [] + + # Missing required args should boil here + self.assertRaises( + tests_utils.ParserException, self.check_parser, + self.cmd, arglist, verifylist) + + @mock.patch.object(sdk_utils, 'supports_microversion', return_value=True) + def 
test_create_basic(self, mock_sm): + arglist = ["res_name", +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + "--{{ k | replace('is_', '') }}", +{%- elif attr.type.__name__ == "str" or attr.type is none %} + "--{{ k | replace('_', '-') }}", "{{ k }}_val", +{%- elif attr.type.__name__ == "dict" %} + "--{{ k | replace('_', '-') }}", "{{ k }}_k={{ k }}_val", +{%- elif attr.type.__name__ == "list" %} + "--{{ k | replace('_', '-') }}", "{{ k }}_v", +{%- endif %} {#- if attr.type.__name__ #} +{%- endfor %} {#- for k,_ in attr.items #} + ] + verifylist = [ + ("name", "res_name"), +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + ("{{ k | replace('is_', '') }}", True), +{%- elif attr.type.__name__ == "str" or attr.type is none %} + ("{{ k }}", "{{ k }}_val"), +{%- elif attr.type.__name__ == "dict" %} + ("{{ k | replace('_', '-') }}", {"{{ k }}_k": "{{ k }}_val"}), +{%- elif attr.type.__name__ == "list" %} + ("{{ k | replace('_', '-') }}", ["{{ k }}_v"]), +{%- endif %} {#- if attr.type.__name #} +{%- endfor %} {#- for k,_ in attr.items #} + ] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # In base command class Lister in cliff, abstract method take action() + # returns a tuple containing the column names and an iterable + # containing the data to be listed. 
+ columns, data = self.cmd.take_action(parsed_args) + + self.sdk_client.{{ sdk_service_name }}.create_{{ resource_name }}.assert_called_with( + name="res_name", +{%- for (k, v) in attrs.items() if not v["attr"].read_only and k != "name" %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + {{ k }}=True, +{%- elif attr.type.__name__ == "str" or attr.type is none %} + {{ k }}="{{ k }}_val", +{%- elif attr.type.__name__ == "dict" %} + {{ k }}={"{{ k }}_k": "{{ k }}_val"}, +{%- elif attr.type.__name__ == "list" %} + {{ k }}=["{{ k }}_v"], +{%- endif %} {#- if attr.type == dict #} +{%- endfor %} {#- for k,_ in attr.items #} + ) + + self.assertEqual(self.columns, columns) + self.assertEqual(self.data, tuple(data)) + diff --git a/codegenerator/templates/osc/test_unit_delete.py.j2 b/codegenerator/templates/osc/test_unit_delete.py.j2 new file mode 100644 index 0000000..8e4c51c --- /dev/null +++ b/codegenerator/templates/osc/test_unit_delete.py.j2 @@ -0,0 +1,136 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +from unittest import mock + +from {{ sdk_mod_name }} import {{ class_name }} +from openstack import exceptions as sdk_exceptions +from openstack.tests.unit import fakes as sdk_fakes +from osc_lib import exceptions + +from {{ osc_mod_name }} import delete +from openstackclient.tests.unit import utils as tests_utils + + +class Test{{ class_name }}(tests_utils.TestCommand): + + def setUp(self): + super().setUp() + + # SDK mock + self.app.client_manager.sdk_connection = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }} = mock.Mock() + self.sdk_client = self.app.client_manager.sdk_connection + self.app.client_manager.sdk_connection\ + .search_resources = mock.Mock() + self.app.client_manager.sdk_connection\ + .{{ sdk_service_name }}.delete_{{ resource_name }} = mock.Mock() + + +class Test{{ class_name }}Delete(Test{{ class_name }}): + + _fakes = sdk_fakes.get_fake_resources({{ class_name }}, 2) + + def setUp(self): + super().setUp() + + self.sdk_client.search_resources = mock.Mock() + + self.app.client_manager.sdk_connection.{{ sdk_service_name }}\ + .delete_{{ resource_name }}.return_value = None + + # Get the command object to test + self.cmd = delete.Delete{{ class_name }}(self.app, None) + + def test_delete(self): + arglist = [self._fakes[0].id] + verifylist = [("{{ resource_name }}", arglist)] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + self.sdk_client.search_resources.return_value = [ + self._fakes[0] + ] + + result = self.cmd.take_action(parsed_args) + + self.sdk_client.search_resources.assert_called_with( + resource_type="{{ registry_name }}", + name_or_id=self._fakes[0].id + ) + + self.sdk_client.{{ sdk_service_name }}\ + .delete_{{ resource_name }}.assert_called_with( + self._fakes[0].id + ) + self.assertIsNone(result) + + def test_delete_multiple(self): + arglist = [] + for item in self._fakes: + arglist.append(item.id) + verifylist = [("{{ resource_name }}", arglist)] + + parsed_args = 
self.check_parser(self.cmd, arglist, verifylist) + + self.sdk_client.search_resources.side_effect = [ + [i] for i in self._fakes + ] + + self.cmd.take_action(parsed_args) + + find_calls = [ + mock.call( + resource_type="{{ registry_name }}", + name_or_id=i.id + ) for i in self._fakes + ] + + delete_calls = [mock.call(i.id) for i in self._fakes] + self.sdk_client.search_resources.assert_has_calls( + find_calls) + self.sdk_client.{{ sdk_service_name }}.delete_{{ resource_name }}.assert_has_calls( + delete_calls) + + def test_delete_multiple_exception(self): + arglist = [self._fakes[0].id, "missing"] + verifylist = [("{{ resource_name }}", [self._fakes[0].id, "missing"])] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + self.sdk_client.search_resources.side_effect = [ + [self._fakes[0]], sdk_exceptions.ResourceNotFound + ] + + try: + self.cmd.take_action(parsed_args) + self.fail("Command should have failed") + except exceptions.CommandError as ex: + self.assertEqual("1 of 2 {{ resource_name }}s failed to delete.", str(ex)) + + find_calls = [ + mock.call( + resource_type="{{ registry_name }}", + name_or_id=self._fakes[0].id + ), + mock.call( + resource_type="{{ registry_name }}", + name_or_id="missing" + ), + ] + + delete_calls = [mock.call(self._fakes[0].id)] + self.sdk_client.search_resources.assert_has_calls( + find_calls) + self.sdk_client.{{ sdk_service_name }}.delete_{{ resource_name }}.assert_has_calls( + delete_calls) diff --git a/codegenerator/templates/osc/test_unit_list.py.j2 b/codegenerator/templates/osc/test_unit_list.py.j2 new file mode 100644 index 0000000..32d5962 --- /dev/null +++ b/codegenerator/templates/osc/test_unit_list.py.j2 @@ -0,0 +1,132 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. 
You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# + +from unittest import mock + +from {{ sdk_mod_name }} import {{ class_name }} +from openstack.tests.unit import fakes as sdk_fakes +from openstack import utils as sdk_utils + +from {{ osc_mod_name }} import list +from openstackclient.tests.unit import utils as tests_utils + + +class Test{{ class_name }}(tests_utils.TestCommand): + + def setUp(self): + super().setUp() + + # SDK mock + self.app.client_manager.sdk_connection = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }} = mock.Mock() + self.sdk_client = self.app.client_manager.sdk_connection + self.app.client_manager.sdk_connection.search_resources = mock.Mock() + + +class Test{{ class_name }}List(Test{{ class_name }}): + + _fake = sdk_fakes.get_fake_resource({{ class_name }}) + + columns = ( +{%- for (k, _) in attrs.items() %} + "{{ k | capitalize() }}", +{%- endfor %} + ) + + data = (( +{%- for (k, _) in attrs.items() %} + _fake.{{ k }}, +{%- endfor %} + ),) + + def setUp(self): + super().setUp() + + self.api_mock = mock.Mock() + self.api_mock.side_effect = [[self._fake], [], ] + + self.sdk_client.search_resources = self.api_mock + + # Get the command object to test + self.cmd = list.List{{ class_name }}(self.app, None) + + def test_list_no_options(self): + arglist = [] + verifylist = [] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # In base command class Lister in cliff, abstract method take action() + # returns a tuple containing the column names and an iterable + # containing the data to be listed. 
+ columns, data = self.cmd.take_action(parsed_args) + + self.sdk_client.search_resources.assert_called_with( + resource_type="{{ registry_name }}", + filters={}, + name_or_id=None + ) + + self.assertEqual(self.columns, columns) + self.assertEqual(self.data, tuple(data)) + + def test_list_all_options(self): + arglist = [ +{%- for (k, _) in res._query_mapping._mapping.items() %} +{%- set attr = attrs.get(k) %} +{%- if not k.startswith('is_') %} + "--{{ k | replace('_', '-') }}", "val_{{ k }}", +{%- else -%} {# if not k.startswith #} + "--{{ k | replace('is_', '') }}", +{%- endif -%} {# if not k.startswith #} +{%- endfor %} + ] + + verifylist = [ +{%- for (k, _) in res._query_mapping._mapping.items() %} +{%- set attr = attrs.get(k) %} +{%- if not k.startswith('is_') %} + ("{{ k }}", "val_{{ k }}"), +{%- else -%} {# if not k.startswith #} + ("{{ k | replace('is_', '') }}", True), +{%- endif -%} {# if not k.startswith #} +{%- endfor %} + ] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # In base command class Lister in cliff, abstract method take action() + # returns a tuple containing the column names and an iterable + # containing the data to be listed. 
+ with mock.patch.object( + sdk_utils, 'supports_microversion', return_value=True): + columns, data = self.cmd.take_action(parsed_args) + + # Set expected values + kwargs = { + "paginated": False, +{%- for (k, _) in res._query_mapping._mapping.items() %} +{%- if not k.startswith('is_') %} + "{{ k }}": "val_{{ k }}", +{%- else -%} {# if not k.startswith #} + "{{ k }}": True, +{%- endif -%} {# if not k.startswith #} +{%- endfor %} + } + + self.sdk_client.search_resources.assert_called_with( + resource_type="{{ registry_name }}", + filters=kwargs, + name_or_id=None, + ) + diff --git a/codegenerator/templates/osc/test_unit_set.py.j2 b/codegenerator/templates/osc/test_unit_set.py.j2 new file mode 100644 index 0000000..2fd1442 --- /dev/null +++ b/codegenerator/templates/osc/test_unit_set.py.j2 @@ -0,0 +1,134 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +from unittest import mock + +from openstack import utils as sdk_utils +from {{ sdk_mod_name }} import {{ class_name }} +from openstack.tests.unit import fakes as sdk_fakes + +from {{ osc_mod_name }} import set +from openstackclient.tests.unit import utils as tests_utils + + +class Test{{ class_name }}(tests_utils.TestCommand): + + def setUp(self): + super().setUp() + + # SDK mock + self.app.client_manager.sdk_connection = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }} = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }}.update_{{ resource_name }} = mock.Mock() + self.sdk_client = self.app.client_manager.sdk_connection + + +class Test{{ class_name }}Set(Test{{ class_name }}): + + _fake = sdk_fakes.get_fake_resource({{ class_name }}) + + columns = ( +{%- for (k, _) in attrs.items() %} + "{{ k }}", +{%- endfor %} + ) + + data = ( +{%- for (k, _) in attrs.items() %} + _fake.{{ k }}, +{%- endfor %} + ) + + def setUp(self): + super().setUp() + + self.sdk_client.search_resources.return_value = self._fake + self.sdk_client.{{ sdk_service_name }}.update_{{ resource_name }}.return_value = self._fake + + # Get the command object to test + self.cmd = set.Set{{ class_name }}(self.app, None) + + def test_set_no_options(self): + + arglist = [] + verifylist = [] + + # Missing required args should boil here + self.assertRaises( + tests_utils.ParserException, self.check_parser, + self.cmd, arglist, verifylist) + + @mock.patch.object(sdk_utils, 'supports_microversion', return_value=True) + def test_set_basic(self, mock_sm): + arglist = ["res_name", +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + "--{{ k | replace('is_', '') }}", +{%- elif attr.type.__name__ == "str" or attr.type is none %} + "--{{ k | replace('_', '-') }}", "{{ k }}_val", +{%- elif attr.type.__name__ == "dict" %} + "--{{ k | replace('_', '-') 
}}", "{{ k }}_k={{ k }}_val", +{%- elif attr.type.__name__ == "list" %} + "--{{ k | replace('_', '-') }}", "{{ k }}_v", +{%- endif %} {#- if attr.type.__name__ #} +{%- endfor %} {#- for k,_ in attr.items #} + ] + verifylist = [ + ("{{ resource_name }}", "res_name"), +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + ("{{ k | replace('is_', '') }}", True), +{%- elif attr.type.__name__ == "str" or attr.type is none %} + ("{{ k }}", "{{ k }}_val"), +{%- elif attr.type.__name__ == "dict" %} + ("{{ k | replace('_', '-') }}", {"{{ k }}_k": "{{ k }}_val"}), +{%- elif attr.type.__name__ == "list" %} + ("{{ k | replace('_', '-') }}", ["{{ k }}_v"]), +{%- endif %} {#- if attr.type.__name #} +{%- endfor %} {#- for k,_ in attr.items #} + ] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # In base command class Lister in cliff, abstract method take action() + # returns a tuple containing the column names and an iterable + # containing the data to be listed. 
+ columns, data = self.cmd.take_action(parsed_args) + + self.sdk_client.search_resources.assert_called_with( + resource_type="{{ registry_name }}", + filters=None, + name_or_id="res_name" + ) + + self.sdk_client.{{ sdk_service_name }}.update_{{ resource_name }}.assert_called_with( + self._fake.id, +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + {{ k }}=True, +{%- elif attr.type.__name__ == "str" or attr.type is none %} + {{ k }}="{{ k }}_val", +{%- elif attr.type.__name__ == "dict" %} + {{ k }}={"{{ k }}_k": "{{ k }}_val"}, +{%- elif attr.type.__name__ == "list" %} + {{ k }}=["{{ k }}_v"], +{%- endif %} {#- if attr.type == dict #} +{%- endfor %} {#- for k,_ in attr.items #} + ) + + self.assertEqual(self.columns, columns) + self.assertEqual(self.data, tuple(data)) + diff --git a/codegenerator/templates/osc/test_unit_show.py.j2 b/codegenerator/templates/osc/test_unit_show.py.j2 new file mode 100644 index 0000000..9736dd3 --- /dev/null +++ b/codegenerator/templates/osc/test_unit_show.py.j2 @@ -0,0 +1,93 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +from unittest import mock + +from {{ sdk_mod_name }} import {{ class_name }} +from openstack.tests.unit import fakes as sdk_fakes + +from {{ osc_mod_name }} import show +from openstackclient.tests.unit import utils as tests_utils + + +class Test{{ class_name }}(tests_utils.TestCommand): + + def setUp(self): + super().setUp() + + # SDK mock + self.app.client_manager.sdk_connection = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }} = mock.Mock() + self.sdk_client = self.app.client_manager.sdk_connection + self.app.client_manager.sdk_connection.search_resources = mock.Mock() + + +class Test{{ class_name }}Show(Test{{ class_name }}): + + _fake = sdk_fakes.get_fake_resource({{ class_name }}) + + columns = ( +{%- for (k, _) in attrs.items() %} + "{{ k }}", +{%- endfor %} + ) + + data = ( +{%- for (k, _) in attrs.items() %} + _fake.{{ k }}, +{%- endfor %} + ) + + def setUp(self): + super().setUp() + + self.api_mock = mock.Mock() + self.api_mock.side_effect = [[self._fake], [], ] + + self.sdk_client.search_resources = self.api_mock + + # Get the command object to test + self.cmd = show.Show{{ class_name }}(self.app, None) + + def test_show_no_options(self): + + arglist = [] + verifylist = [] + + # Missing required args should boil here + self.assertRaises( + tests_utils.ParserException, self.check_parser, + self.cmd, arglist, verifylist) + + def test_show_basic(self): + arglist = ["res_id"] + verifylist = [ + ("{{ class_name | lower }}", "res_id") + ] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # In base command class Lister in cliff, abstract method take action() + # returns a tuple containing the column names and an iterable + # containing the data to be listed. 
+ columns, data = self.cmd.take_action(parsed_args) + + self.sdk_client.search_resources.assert_called_with( + resource_type="{{ registry_name }}", + filters=None, + name_or_id="res_id" + ) + + self.assertEqual(self.columns, columns) + self.assertEqual(self.data, tuple(data)) + diff --git a/codegenerator/templates/osc/test_unit_unset.py.j2 b/codegenerator/templates/osc/test_unit_unset.py.j2 new file mode 100644 index 0000000..a922c85 --- /dev/null +++ b/codegenerator/templates/osc/test_unit_unset.py.j2 @@ -0,0 +1,142 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +from unittest import mock + +from openstack import utils as sdk_utils +from {{ sdk_mod_name }} import {{ class_name }} +from openstack.tests.unit import fakes as sdk_fakes +from osc_lib import exceptions + +from {{ osc_mod_name }} import unset +from openstackclient.tests.unit import utils as tests_utils + + +class Test{{ class_name }}(tests_utils.TestCommand): + + def setUp(self): + super().setUp() + + # SDK mock + self.app.client_manager.sdk_connection = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }} = mock.Mock() + self.app.client_manager.sdk_connection.{{ sdk_service_name }}.update_{{ resource_name }} = mock.Mock() + self.sdk_client = self.app.client_manager.sdk_connection + + +class Test{{ class_name }}Unset(Test{{ class_name }}): + + _fake = sdk_fakes.get_fake_resource({{ class_name }}) + + columns = ( +{%- for (k, _) in attrs.items() %} + "{{ k }}", +{%- endfor %} + ) + + data = ( +{%- for (k, _) in attrs.items() %} + _fake.{{ k }}, +{%- endfor %} + ) + + def setUp(self): + super().setUp() + + self.sdk_client.search_resources.return_value = self._fake + self.sdk_client.{{ sdk_service_name }}.update_{{ resource_name }}.return_value = self._fake + + # Get the command object to test + self.cmd = unset.Unset{{ class_name }}(self.app, None) + + def test_unset_no_options(self): + + arglist = [] + verifylist = [] + + # Missing required args should boil here + self.assertRaises( + tests_utils.ParserException, self.check_parser, + self.cmd, arglist, verifylist) + + def test_unset_unsupported_options(self): + + arglist = ["res_name", "--property", "fake-prop"] + verifylist = [ + ("{{ resource_name }}", "res_name"), + ("properties", ["fake-prop"]) + ] + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # Unsupported properties should boil here + self.assertRaises( + exceptions.CommandError, + self.cmd.take_action, + parsed_args + ) + + @mock.patch.object(sdk_utils, 'supports_microversion', return_value=True) + def
test_unset_basic(self, mock_sm): + arglist = ["res_name", +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} + "--property", +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + "{{ k | replace('is_', '') }}", +{%- else %} + "{{ k }}", +{%- endif %} +{%- endfor %} {#- for k,_ in attr.items #} + ] + verifylist = [ + ("{{ resource_name }}", "res_name"), + ("properties", [ +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + "{{ k | replace('is_', '') }}", +{%- else %} + "{{ k }}", +{%- endif %} {#- if attr.type.__name #} +{%- endfor %} {#- for k,_ in attr.items #} + ]), + ] + + parsed_args = self.check_parser(self.cmd, arglist, verifylist) + + # In base command class Lister in cliff, abstract method take action() + # returns a tuple containing the column names and an iterable + # containing the data to be listed. + columns, data = self.cmd.take_action(parsed_args) + + self.sdk_client.search_resources.assert_called_with( + resource_type="{{ registry_name }}", + filters=None, + name_or_id="res_name" + ) + + self.sdk_client.{{ sdk_service_name }}.update_{{ resource_name }}.assert_called_with( + self._fake.id, +{%- for (k, v) in attrs.items() if not v["attr"].read_only and not v["attr"].create_only %} +{%- set attr=v['attr'] %} +{%- if attr.type.__name__ == "bool" %} + {{ k }}=None, +{%- else %} + {{ k }}=None, +{%- endif %} {#- if attr.type == dict #} +{%- endfor %} {#- for k,_ in attr.items #} + ) + + self.assertEqual(self.columns, columns) + self.assertEqual(self.data, tuple(data)) + diff --git a/codegenerator/templates/rust_cli/impl.rs.j2 b/codegenerator/templates/rust_cli/impl.rs.j2 new file mode 100644 index 0000000..cf09316 --- /dev/null +++ b/codegenerator/templates/rust_cli/impl.rs.j2 @@ -0,0 +1,257 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file 
except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// SPDX-License-Identifier: Apache-2.0 +// +// WARNING: This file is automatically generated from OpenAPI schema using +// `openstack-codegenerator`. + +//! {{ operation_type | title }} {{ target_class_name }} command +{%- if microversion %} [microversion = {{ microversion }}]{%- endif %} +//! +//! Wraps invoking of the `{{ url }}` with `{{ method|upper }}` method + +{% import 'rust_macros.j2' as macros with context -%} +use clap::Args; +use serde::{Deserialize, Serialize}; +use tracing::info; + +use anyhow::Result; + +use openstack_sdk::AsyncOpenStack; + +use crate::output::OutputProcessor; +use crate::Cli; +use crate::OutputConfig; +use crate::StructTable; +use crate::OpenStackCliError; + +{% for mod in additional_imports | sort %} +use {{ mod }}; +{%- endfor %} + +{{ macros.docstring(command_description) }} +#[derive(Args)] +{%- if command_summary %} +#[command(about = "{{ command_summary }}")] +{%- endif %} +pub struct {{ target_class_name }}Command { + /// Request Query parameters + #[command(flatten)] + query: QueryParameters, + + /// Path parameters + #[command(flatten)] + path: PathParameters, + +{% include "rust_cli/parameters.j2" %} + +{% if operation_type == "list" and "limit" in type_manager.get_parameters("query")|list|map(attribute=0) %} + /// Total limit of entities count to return. Use this when there are too many entries. 
+ #[arg(long, default_value_t=10000)] + max_items: usize +{%- endif %} + +{% if operation_type == "download" %} + /// Destination filename (using "-" will print object to stdout) + #[arg(long)] + file: Option<String>, +{% elif operation_type == "upload" %} + /// Source filename (using "-" will read object from stdin) + #[arg(long)] + file: Option<String>, +{%- endif %} + +} +{% include "rust_cli/query_parameters.j2" %} +{% include "rust_cli/path_parameters.j2" %} + +{%- for type in type_manager.get_subtypes() %} +{%- if type["variants"] is defined %} +{{ macros.docstring(type.description, indent=0) }} +#[derive(Clone, Eq, Ord, PartialEq, PartialOrd, ValueEnum)] +enum {{ type.name }} { + {%- for k in type.variants.keys()|sort %} + {{ k }}, + {%- endfor %} +} + +{%- elif type["base_type"] == "struct" %} +/// {{ type.name }} Body data +#[derive(Args)] +{%- if type["is_group"] is defined and type.is_group %} +#[group(required={{ type.is_required | lower }}, multiple={{ "true" if type.__class__.__name__ != "EnumGroupStruct" else "false" }})] +{%- endif %} +struct {{ type.name }} { + {%- if type["is_group"] is defined and type.is_group %} + {%- for (_, field) in type.fields | dictsort %} + {{ macros.docstring(field.description, indent=4) }} + {{ field.clap_macros_ext(is_group=type.is_group) }} + {{ field.local_name }}: {{ field.type_hint }}, + {%- endfor %} + {%- else %} + {%- for field in type.fields.values() %} + {{ macros.docstring(field.description, indent=4) }} + {{ field.clap_macros_ext(is_group=type.is_group) }} + {{ field.local_name }}: {{ field.type_hint }}, + {%- endfor %} + {%- endif %} +} +{%- endif %} +{% endfor %} + +{%- include 'rust_cli/response_struct.j2' %} + +impl {{ target_class_name }}Command { + /// Perform command action + pub async fn take_action( + &self, + parsed_args: &Cli, + client: &mut AsyncOpenStack, + ) -> Result<(), OpenStackCliError> { + info!("{{ operation_type | title }} {{ target_class_name }}"); + + let op =
OutputProcessor::from_args(parsed_args); + op.validate_args(parsed_args)?; + + {%- if operation_type == "download" and is_image_download %} + {%- include 'rust_cli/impl_image_download.j2' %} + {%- else %} + + {% if find_present and operation_type in ["show", "set", "download"] %} + {#- find the resource #} + let mut find_builder = find::{{ sdk_struct_name }}::builder(); + {{ macros.set_cli_path_parameters(type_manager, "find_builder", find_mode=True) }} + {%- if microversion %} + find_builder.header("OpenStack-API-Version", "{{ "volume" if service_type == "block-storage" else service_type }} {{ microversion }}"); + {%- endif %} + let find_ep = find_builder + .build() + .map_err(|x| OpenStackCliError::EndpointBuild(x.to_string()))?; + let find_data: serde_json::Value = find(find_ep).query_async(client).await?; + {%- endif %} + + {%- if not is_json_patch and (operation_type != "show" or not find_present) %} + let mut ep_builder = {{ sdk_mod_path[-1] }}::Request::builder(); + {%- if microversion %} + ep_builder.header("OpenStack-API-Version", "{{ "volume" if service_type == "block-storage" else service_type }} {{ microversion }}"); + {%- endif %} + {{ macros.set_cli_path_parameters(type_manager, "ep_builder") }} + {% include 'rust_cli/set_query_parameters.j2' %} + {% include 'rust_cli/set_body_parameters.j2' %} + + {%- if operation_type == "upload" and body_types|length == 1 and body_types[0] != "*/*" %} + // The only supported media type + ep_builder.header("content-type", "{{ body_types[0] }}"); + {%- endif %} + + let ep = ep_builder + .build() + .map_err(|x| OpenStackCliError::EndpointBuild(x.to_string()))?; + {%- endif %} + + {# Response #} + {%- with data_type = response_type_manager.get_root_data_type() %} + + + {%- if (data_type.__class__.__name__ == "StructResponse" and data_type.fields) or data_type.__class__.__name__ == "TupleStruct" or data_type.__class__.__name__ == "HashMapResponse" %} + {#- there is result structure meand we can render response #} + + 
{%- if operation_type == "list" %} + {% include 'rust_cli/invoke_list.j2' %} + + {%- elif operation_type in ["show"] %} + {#- Show/get implementation #} + {%- if find_present %} + op.output_single::<ResponseData>(find_data)?; + {%- else %} + let data = ep.query_async(client).await?; + op.output_single::<ResponseData>(data)?; + {%- endif %} + + {%- elif operation_type == "create" %} + {% include 'rust_cli/invoke_create.j2' %} + + {%- elif operation_type == "set" and method == "patch" and is_json_patch %} +{#- Patch implementation #} + {% include 'rust_cli/invoke_patch.j2' %} + {%- else %} + {%- if result_is_list %} + let data: Vec<serde_json::Value> = ep.query_async(client).await?; + op.output_list::<ResponseData>(data)?; + {%- else %} + let data = ep.query_async(client).await?; + op.output_single::<ResponseData>(data)?; + {%- endif %} + {%- endif %} + {%- elif operation_type not in ["delete", "download", "upload", "json"] %} + {#- there is no result structure - raw mode #} + let _rsp: Response<Bytes> = ep.raw_query_async(client).await?; + + {%- if resource_header_metadata %} + {#- metadata from headers for now can be only returned when there is no response struct #} + let mut metadata: HashMap<String, String> = HashMap::new(); + let headers = _rsp.headers(); + + let mut regexes: Vec<Regex> = vec![ + {%- for hdr, spec in resource_header_metadata.items() %} + {%- if "*" in hdr %} + Regex::new(r"(?i){{ hdr | replace("*", "\.*") }}").unwrap(), + {%- endif %} + {%- endfor %} + ]; + + for (hdr, val) in headers.iter() { + if [ + {%- for hdr, spec in resource_header_metadata.items() %} + {%- if not "*" in hdr %} + "{{ hdr | lower }}", + {%- endif %} + {%- endfor %} + ].contains(&hdr.as_str()) { + metadata.insert( + hdr.to_string(), + val.to_str().unwrap_or_default().to_string()); + } else if !regexes.is_empty() { + for rex in regexes.iter() { + if rex.is_match(hdr.as_str()) { + metadata.insert( + hdr.to_string(), + val.to_str().unwrap_or_default().to_string()); + } + } + } 
+ } + let data = ResponseData {metadata: metadata.into()}; + {%- else %} + let data = ResponseData {}; + {%- endif %} + // Maybe output some headers metadata + op.output_human::<ResponseData>(&data)?; + {%- elif operation_type == "delete" %} + let _rsp: Response<Bytes> = ep.raw_query_async(client).await?; + {%- elif operation_type == "download" %} + {%- include 'rust_cli/invoke_download.j2' %} + + {%- elif operation_type == "upload" %} + {%- include 'rust_cli/invoke_upload.j2' %} + {%- elif operation_type == "json" %} + let rsp: Response<Bytes> = ep.raw_query_async(client).await?; + let data: serde_json::Value = serde_json::from_slice(rsp.body())?; + op.output_machine(data)?; + {%- endif %} + +{%- endwith %} +{%- endif %} {#- specialities #} + Ok(()) + } +} diff --git a/codegenerator/templates/rust_cli/impl_image_download.j2 b/codegenerator/templates/rust_cli/impl_image_download.j2 new file mode 100644 index 0000000..079890e --- /dev/null +++ b/codegenerator/templates/rust_cli/impl_image_download.j2 @@ -0,0 +1,28 @@ + let find_ep = find::Request::builder() + .id(&self.path.image_id) + .build() + .map_err(|x| OpenStackCliError::EndpointBuild(x.to_string()))?; + let image_data: serde_json::Value = find(find_ep).query_async(client).await?; + + let image_id = image_data["id"] + .as_str() + .expect("Image ID is a string") + .to_string(); + let image_name = image_data["name"] + .as_str() + .expect("Image name is a string") + .to_string(); + + let ep = download::Request::builder() + .image_id(image_id) + .build() + .map_err(|x| OpenStackCliError::EndpointBuild(x.to_string()))?; + let (headers, data) = ep.download_async(client).await?; + + let size: u64 = headers + .get("content-length") + .map(|x| x.to_str().expect("Header is a string")) + .unwrap_or("0") + .parse() + .unwrap(); + download_file(self.file.clone().unwrap_or(image_name), size, data).await?; diff --git a/codegenerator/templates/rust_cli/invoke_create.j2 b/codegenerator/templates/rust_cli/invoke_create.j2 new 
file mode 100644 index 0000000..106cebd --- /dev/null +++ b/codegenerator/templates/rust_cli/invoke_create.j2 @@ -0,0 +1,3 @@ +{#- Create operation handling #} + let data = ep.query_async(client).await?; + op.output_single::<ResponseData>(data)?; diff --git a/codegenerator/templates/rust_cli/invoke_download.j2 b/codegenerator/templates/rust_cli/invoke_download.j2 new file mode 100644 index 0000000..4edb2b2 --- /dev/null +++ b/codegenerator/templates/rust_cli/invoke_download.j2 @@ -0,0 +1,15 @@ + let (headers, data) = ep.download_async(client).await?; + + let size: u64 = headers + .get("content-length") + .map(|x| x.to_str().expect("Header is a string")) + .unwrap_or("0") + .parse() + .unwrap(); + download_file( + self.file.clone().unwrap_or(self.{{ last_path_parameter.name }}.clone()), + size, + data, + ) + .await?; + diff --git a/codegenerator/templates/rust_cli/invoke_list.j2 b/codegenerator/templates/rust_cli/invoke_list.j2 new file mode 100644 index 0000000..ceef01b --- /dev/null +++ b/codegenerator/templates/rust_cli/invoke_list.j2 @@ -0,0 +1,15 @@ +{#- List operation #} +{%- if data_type.__class__.__name__ in ["StructResponse", "TupleStruct"] %} + {%- if is_list_paginated %} + {#- paginated list #} + let data: Vec<serde_json::Value> = paged(ep, Pagination::Limit(self.max_items)).query_async(client).await?; + {%- else %} + let data: Vec<serde_json::Value> = ep.query_async(client).await?; + {%- endif %} + + op.output_list::<ResponseData>(data)?; + +{%- elif data_type.__class__.__name__ == "HashMapResponse" %} + let data = ep.query_async(client).await?; + op.output_single::<ResponseData>(data)?; +{%- endif %} diff --git a/codegenerator/templates/rust_cli/invoke_patch.j2 b/codegenerator/templates/rust_cli/invoke_patch.j2 new file mode 100644 index 0000000..c0f1add --- /dev/null +++ b/codegenerator/templates/rust_cli/invoke_patch.j2 @@ -0,0 +1,70 @@ +{#- Patch implementation #} +{% with root = type_manager.get_root_data_type() %} + // Patching resource requires 
fetching and calculating diff + let resource_id = find_data["id"] + .as_str() + .expect("Resource ID is a string") + .to_string(); + + let data: ResponseData = serde_json::from_value(find_data)?; + let mut new = data.clone(); + + {%- for attr_name, field in root.fields.items() %} + {%- if attr_name != "id" %}{# glance doesn't hide "ID" from change #} + if let Some(val) = &self.{{ field.local_name }} { + {%- if field.type_hint == "Option<Vec<String>>" %} + new.{{ field.local_name }} = Some(serde_json::from_value(val.to_owned().into())?); + + {%- elif field.type_hint == "Option<Vec<Value>>" %} + new.{{ field.local_name }} = Some(serde_json::from_value(val.to_owned().into())?); + + {%- elif field.type_hint in ["Option<bool>", "Option<i32>", "Option<f32>", "Option<i64>", "Option<f64>", "bool", "i32", "u32", "f32", "i64", "f64"]%} + new.{{ field.local_name }} = Some(*val); + + {%- elif field.data_type.__class__.__name__ in ["StringEnum"]%} + // StringEnum + let tmp = match val { + {%- for variant in field.data_type.variants.keys() | sort %} + {#- normally we should use the cli enum name, but we don't have it here and names must match anyway#} + {{ field.data_type.name }}::{{ variant }} => { + "{{ variant | lower }}" + } + {%- endfor %} + }; + new.{{ field.local_name }} = Some(tmp.to_string()); + + {%- elif "Option" in field.type_hint %} + new.{{ field.local_name }} = Some(val.into()); + + {%- else %} + new.{{ field.local_name }} = *val; + {%- endif %} + } + {%- endif %} + {%- endfor %} + + let curr_json = serde_json::to_value(&data).unwrap(); + let mut new_json = serde_json::to_value(&new).unwrap(); + + {%- if root.additional_fields_type %} +{#- additional properties are not present in the output and thus handleded on the raw json #} + if let Some(properties) = &self.properties { + for (key, val) in properties { + new_json[key] = json!(val); + } + } + {%- endif %} + + let patch = diff(&curr_json, &new_json); + + let mut patch_ep_builder = {{ method }}::{{ 
sdk_struct_name }}::builder(); + patch_ep_builder.id(&resource_id); + patch_ep_builder.patch(patch); + + let patch_ep = patch_ep_builder + .build() + .map_err(|x| OpenStackCliError::EndpointBuild(x.to_string()))?; + let new_data = patch_ep.query_async(client).await?; + op.output_single::<ResponseData>(new_data)?; + +{%- endwith %} diff --git a/codegenerator/templates/rust_cli/invoke_upload.j2 b/codegenerator/templates/rust_cli/invoke_upload.j2 new file mode 100644 index 0000000..15edb73 --- /dev/null +++ b/codegenerator/templates/rust_cli/invoke_upload.j2 @@ -0,0 +1,5 @@ + let dst = self.file.clone(); + let data = build_upload_asyncread(dst).await?; + + let _rsp: Response<Bytes> = ep.raw_query_read_body_async(client, data).await?; + // TODO: what if there is an interesting response diff --git a/codegenerator/templates/rust_cli/parameters.j2 b/codegenerator/templates/rust_cli/parameters.j2 new file mode 100644 index 0000000..e71c52e --- /dev/null +++ b/codegenerator/templates/rust_cli/parameters.j2 @@ -0,0 +1,26 @@ +{%- with type = type_manager.get_root_data_type() %} +{%- if type["fields"] is defined %} +{#- Structure #} + {%- for field in type.fields.values() %} + {%- if operation_type == "set" and field.local_name == "id" %} + {%- else %} + {{ field.clap_macros }} + {{ field.local_name }}: {{ field.type_hint }}, + {%- endif %} + {%- endfor %} + {%- if type.additional_fields_type %} + /// Additional properties to be sent with the request + #[arg(long="property", value_name="key=value", value_parser=parse_key_val::<String, {{ type.additional_fields_type.type_hint }}>)] + properties: Option<Vec<(String, {{ type.additional_fields_type.type_hint }})>>, + {%- endif %} + +{%- elif type.__class__.__name__ == "DictionaryInput" %} + {%- if type.value_type.__class__.__name__ != "Option" %} + #[arg(long="property", value_name="key=value", value_parser=parse_key_val::<String, {{ type.value_type.type_hint }}>)] + {%- else %} + #[arg(long="property", value_name="key=value", 
value_parser=parse_key_val_opt::<String, {{ type.value_type.item_type.type_hint }}>)] + {%- endif %} + properties: Option<Vec<(String, {{ type.value_type.type_hint }})>>, + +{%- endif %} +{%- endwith %} diff --git a/codegenerator/templates/rust_cli/path_parameters.j2 b/codegenerator/templates/rust_cli/path_parameters.j2 new file mode 100644 index 0000000..0e65183 --- /dev/null +++ b/codegenerator/templates/rust_cli/path_parameters.j2 @@ -0,0 +1,12 @@ + +/// Path parameters +#[derive(Args)] +struct PathParameters { +{%- for param in type_manager.parameters.values() %} +{%- if param.location == "path"%} + {{ macros.docstring(param.description, indent=4) }} + {{ param.clap_macros }} + {{ param.local_name }}: {{ param.type_hint }}, +{%- endif %} +{%- endfor %} +} diff --git a/codegenerator/templates/rust_cli/query_parameters.j2 b/codegenerator/templates/rust_cli/query_parameters.j2 new file mode 100644 index 0000000..804c40e --- /dev/null +++ b/codegenerator/templates/rust_cli/query_parameters.j2 @@ -0,0 +1,12 @@ + +/// Query parameters +#[derive(Args)] +struct QueryParameters { +{%- for param in type_manager.parameters.values() %} +{%- if param.location == "query" %} + {{ macros.docstring(param.description, indent=4) }} + {{ param.clap_macros }} + {{ param.local_name}}: {{ param.type_hint }}, +{%- endif %} +{%- endfor %} +} diff --git a/codegenerator/templates/rust_cli/response_struct.j2 b/codegenerator/templates/rust_cli/response_struct.j2 new file mode 100644 index 0000000..d4a6f01 --- /dev/null +++ b/codegenerator/templates/rust_cli/response_struct.j2 @@ -0,0 +1,171 @@ +{%- import 'rust_macros.j2' as macros with context -%} +{%- with data_type = response_type_manager.get_root_data_type() %} +{%- if data_type.__class__.__name__ == "StructResponse" %} + {%- if data_type.fields %} + /// {{ target_class_name }} response representation + #[derive(Deserialize, Serialize)] + #[derive(Clone, StructTable)] + struct ResponseData { + {%- for k, v in data_type.fields.items() 
%} + {% if not (operation_type == "list" and k in ["links"]) %} + {{ macros.docstring(v.description, indent=4) }} + {{ v.serde_macros }} + {{ v.get_structable_macros(data_type, sdk_service_name, resource_name, operation_type) }} + {{ v.local_name }}: {{ v.type_hint }}, + {%- endif %} + {%- endfor %} + + } + {%- else %} + {#- No response data at all #} + /// {{ target_class_name }} response representation + #[derive(Deserialize, Serialize)] + #[derive(Clone, StructTable)] + struct ResponseData {} + {%- endif %} + +{%- elif data_type.__class__.__name__ == "TupleStruct" %} + {#- tuple struct requires custom implementation of StructTable #} + /// {{ target_class_name }} response representation + #[derive(Deserialize, Serialize)] + #[derive(Clone)] + struct ResponseData( + {%- for field in data_type.tuple_fields %} + {{ field.type_hint }}, + {%- endfor %} + ); + + impl StructTable for ResponseData { + fn build(&self, _: &OutputConfig) -> (Vec<String>, + Vec<Vec<String>>) { + let headers: Vec<String> = Vec::from(["Value".to_string()]); + let res: Vec<Vec<String>> = Vec::from([Vec::from([self.0. + to_string()])]); + (headers, res) + } + } + + impl StructTable for Vec<ResponseData> { + fn build(&self, _: &OutputConfig) -> (Vec<String>, + Vec<Vec<String>>) { + let headers: Vec<String> = Vec::from(["Values".to_string()]); + let res: Vec<Vec<String>> = + Vec::from([Vec::from([self.into_iter().map(|v| v.0. 
+ to_string()).collect::<Vec<_>>().join(", ")])]); + (headers, res) + } + } + +{%- elif data_type.__class__.__name__ == "HashMapResponse" %} + /// Response data as HashMap type + #[derive(Deserialize, Serialize)] + struct ResponseData(HashMap<String, {{ data_type.value_type.type_hint }}>); + + impl StructTable for ResponseData { + fn build(&self, _options: &OutputConfig) -> (Vec<String>, Vec<Vec<String>>) { + let headers: Vec<String> = Vec::from(["Name".to_string(), "Value".to_string()]); + let mut rows: Vec<Vec<String>> = Vec::new(); + rows.extend( + self.0 + .iter() + {%- if data_type.value_type.type_hint == "Value" %} + .map(|(k, v)| Vec::from([k.clone(), serde_json::to_string(&v).expect("Is a valid data")])), + {%- elif data_type.value_type.type_hint == "String" %} + .map(|(k, v)| Vec::from([k.clone(), v.clone()])), + {%- elif data_type.value_type.__class__.__name__ == "Option" %} + .map(|(k, v)| Vec::from([k.clone(), v.clone().unwrap_or("".to_string()).to_string()])), + {%- else %} + .map(|(k, v)| Vec::from([k.clone(), v.to_string()])), + {%- endif %} + ); + (headers, rows) + } + } + +{%- endif %} +{%- endwith %} + +{%- for subtype in response_type_manager.get_subtypes() %} +{%- if subtype["fields"] is defined %} +/// `{{ subtype.base_type }}` response type +#[derive(Default)] +#[derive(Clone)] +#[derive(Deserialize, Serialize)] +{{ subtype.base_type }} {{ subtype.name }} { + {%- for k, v in subtype.fields.items() %} + {{ v.local_name }}: {{ v.type_hint }}, + {%- endfor %} +} + +impl fmt::Display for {{ subtype.name }} { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let data = Vec::from([ + {%- for k, v in subtype.fields.items() %} + format!( + "{{v.local_name}}={}", + self + .{{ v.local_name }} + {%- if v.type_hint.startswith("Option") %} + {%- if v.type_hint not in ["Option<i32>", "Option<i64>", "Option<f32>", "Option<f64>", "Option<bool>"] %} + .clone() + {%- endif %} + .map(|v| v.to_string()) + .unwrap_or("".to_string()) + {%- endif %} + 
), + {%- endfor %} + ]); + write!( + f, + "{}", + data + .join(";") + ) + } +} + +{%- elif subtype.base_type == "vec" %} +/// Vector of `{{ subtype.item_type.type_hint}}` response type +#[derive(Default)] +#[derive(Clone)] +#[derive(Deserialize, Serialize)] +struct Vec{{ subtype.item_type.type_hint}}(Vec<{{subtype.item_type.type_hint}}>); +impl fmt::Display for Vec{{ subtype.item_type.type_hint }} { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "[{}]", + self.0 + .iter() + .map(|v| v.to_string() ) + .collect::<Vec<String>>() + .join(",") + ) + } +} + +{%- elif subtype.base_type == "dict" %} +/// HashMap of `{{ subtype.value_type.type_hint }}` response type +#[derive(Default)] +#[derive(Clone)] +#[derive(Deserialize, Serialize)] +struct {{ subtype.type_hint }}(HashMap<String, {{ subtype.value_type.type_hint }}>); +impl fmt::Display for {{ subtype.type_hint }} { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!( + f, + "{{ '{{{}}}' }}", + self.0 + .iter() + {%- if subtype.value_type.__class__.__name__ == "Option" %} + .map(|v| format!("{}={}", v.0, v.1.clone().unwrap_or("".to_string()))) + {%- else %} + .map(|v| format!("{}={}", v.0, v.1)) + {%- endif %} + .collect::<Vec<String>>() + .join("\n") + ) + } +} +{%- endif %} +{%- endfor %} diff --git a/codegenerator/templates/rust_cli/set_body_parameters.j2 b/codegenerator/templates/rust_cli/set_body_parameters.j2 new file mode 100644 index 0000000..d717bbe --- /dev/null +++ b/codegenerator/templates/rust_cli/set_body_parameters.j2 @@ -0,0 +1,70 @@ +{% import 'rust_macros.j2' as macros with context -%} +{% if not is_json_patch -%} + // Set body parameters +{%- with root = type_manager.get_root_data_type() %} +{%- if root.__class__.__name__ == "StructInput" %} + {%- for root_attr, root_field in root.fields.items() %} + // Set Request.{{ root_field.remote_name }} data + {%- if root_field.is_optional %} + if let Some(args) = &self.{{ root_field.local_name }} { + {%- else %} 
+ let args = &self.{{ root_field.local_name }}; + {%- endif %} + + {%- if root_field.data_type.__class__.__name__ == "StructInput" %} + {%- set builder_name = root_field.local_name + "_builder" %} + let mut {{ builder_name }} = {{ sdk_mod_path[-1] }}::{{ root_field.data_type.name }}Builder::default(); + + {%- for k, v in root_field.data_type.fields.items() %} + {%- if v.is_optional %} + if let Some(val) = &args.{{ v.local_name }} { + {{ macros.set_request_data_from_input(builder_name, v, "val") }} + } + {%- elif v.data_type.format is defined and v.data_type.format == "password" %} + if let Some(val) = &args.{{ v.local_name }} { + {{ builder_name }}.{{ v.remote_name }}(val); + } else { + let secret = Password::new() + {%- if v.description %} + .with_prompt("{{ v.description|trim |trim('.')}}") + {%- else %} + .with_prompt("{{ k }}") + {%- endif %} + .interact() + .unwrap(); + {{ builder_name }}.{{ v.remote_name }}(secret.to_string()); + } + {%- else %} + {{ macros.set_request_data_from_input(builder_name, v, "&args." 
+ v.local_name) }} + {%- endif %} + + {% endfor %} + ep_builder.{{ root_field.remote_name }}({{ builder_name }}.build().unwrap()); + + {%- else %} + {{ macros.set_request_data_from_input("ep_builder", root_field, "args") }} + {%- endif %} + + {% if root_field.is_optional %} + } + {%- endif %} + {% endfor %} + {%- if root.additional_fields_type %} + if let Some(properties) = &self.properties { + ep_builder.properties(properties.iter().cloned()); + } + {%- endif %} +{%- elif root.__class__.__name__ == "DictionaryInput" %} + if let Some(properties) = &self.properties { + {%- if root.value_type.__class__.__name__ == "Option" %} + ep_builder.properties(properties + .into_iter() + .map(|(k, v)| (k, v.as_ref().map(Into::into))), + ); + {%- else %} + ep_builder.properties(properties.iter().cloned()); + {%- endif %} + } +{%- endif %} +{%- endwith %} +{%- endif %} diff --git a/codegenerator/templates/rust_cli/set_path_parameters.j2 b/codegenerator/templates/rust_cli/set_path_parameters.j2 new file mode 100644 index 0000000..b4c0a48 --- /dev/null +++ b/codegenerator/templates/rust_cli/set_path_parameters.j2 @@ -0,0 +1,18 @@ + // Set path parameters +{%- for (k, v) in type_manager.get_parameters("path") %} +{%- if not v.is_required %} + {%- if k != "project_id" %} + if let Some(val) = &self.path.{{ v.local_name }} { + ep_builder.{{ v.local_name }}(val); + } + {%- else %} + if let Some(val) = &self.path.{{ v.local_name }} { + ep_builder.{{ v.local_name }}(val); + } else { + ep_builder.{{ v.local_name }}(client.get_current_project().expect("Project ID must be known").id); + } + {%- endif %} +{%- else %} + ep_builder.{{ v.local_name }}(&self.path.{{ v.local_name }}); +{%- endif %} +{%- endfor %} diff --git a/codegenerator/templates/rust_cli/set_query_parameters.j2 b/codegenerator/templates/rust_cli/set_query_parameters.j2 new file mode 100644 index 0000000..1d77682 --- /dev/null +++ b/codegenerator/templates/rust_cli/set_query_parameters.j2 @@ -0,0 +1,23 @@ +{%- import 
'rust_macros.j2' as macros with context -%} + // Set query parameters +{%- for (k, v) in type_manager.get_parameters("query") %} +{%- if v.data_type.__class__.__name__ == "BooleanFlag" and v.data_type.original_data_type.__class__.__name__ == "Null" %} + {%- if v.is_required %} + if self.query.{{ v.local_name }} { + ep_builder.{{ v.remote_name }}(serde_json::Value::Null); + + } + {%- else %} + if let Some(true) = self.query.{{ v.local_name }} { + ep_builder.{{ v.remote_name }}(serde_json::Value::Null); + + } + {%- endif %} +{%- elif not v.is_required %} + if let Some(val) = &self.query.{{ v.local_name }} { + {{ macros.set_request_data_from_input("ep_builder", v, "val")}} + } +{%- else %} + {{ macros.set_request_data_from_input("ep_builder", v, "&self.query." + v.local_name )}} +{%- endif %} +{%- endfor %} diff --git a/codegenerator/templates/rust_macros.j2 b/codegenerator/templates/rust_macros.j2 new file mode 100644 index 0000000..f9233e0 --- /dev/null +++ b/codegenerator/templates/rust_macros.j2 @@ -0,0 +1,353 @@ +{%- macro mod_docstring(v) %} +{%- if v %} +//! {{ v | wrap_markdown(75) | replace('\n', '\n//! 
') }} +{%- endif %} +{%- endmacro %} + +{%- macro docstring(doc, indent=0) %} +{#- docstring for an element #} +{%- if doc %} +{{ (' ' * indent) }}/// {{ doc | trim("\n") | wrap_markdown(79-indent-4) | replace('\n', '\n' + (' ' * indent) + '/// ') }} +{%- endif %} +{%- endmacro %} + +{%- macro serde_args(k, attr) -%} +{%- set serde_args=[ +('rename = "' + attr.name + '"') if attr.name != k else None, +'deserialize_with="deser_ok_or_default"' if attr.default is not none and (attr.min_version is none and +attr.max_version is none) else None, +'default' if (attr.min_version is not none or +attr.max_version is not none) else None +] -%} +{{ serde_args | reject("none") | join(',') }} +{%- endmacro %} + +{%- macro arg_raw_type(attr) %} +{%- if attr.type.__name__ == "str" or attr.type is none -%} +String +{%- elif attr.type.__name__ == "int" -%} +i32 +{%- elif attr.type.__name__ == "float" -%} +f32 +{%- elif attr.type.__name__ == "bool" -%} +bool +{%- elif attr.type.__name__ == "dict" -%} +HashMapStringString +{%- endif %} +{%- endmacro %} + +{%- macro arg_type(k, attr) %} +{%- if attr.min_version is not none or attr.max_version is not none -%} +Option +{%- endif -%} +{{ arg_raw_type(attr) }} +{%- endmacro %} + +{%- macro struct_field(k, v) %} +{%- set attr=v['attr'] %} +{%- if attr.type is none or attr.type.__name__ in +["str", "int", "float", "bool", "dict"] +%} + {{ docstring(v) }} + #[structable()] + #[serde({{ serde_args(k, attr) }})] + {{ k }}: {{ arg_type(k, attr) }}, +{%- endif %} +{%- endmacro %} + +{%- macro cli_arg_params(params) %} +{%- for param in params %} +{%- if param.schema.type != "null" %} +{{ docstring(param.description, indent=4) }} +{%- for macros in param.param_macros %} + {{ macros }} +{%- endfor %} + {{ param.local_name }}: {{ param.type }}, +{%- endif %} +{%- endfor %} +{%- endmacro %} + +{%- macro sdk_builder_setter_btreemap(field) %} + {%- set is_opt = False if field.data_type.__class__.__name__ != "Option" else True %} + {%- set dt = 
field.data_type if not is_opt else field.data_type.item_type %} + {{ docstring(field.description, indent=4) }} + pub fn {{ field.local_name }}<I, K, V>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = (K, V)>, + K: Into<Cow<'a, str>>, + V: Into<{{ dt.value_type.type_hint }}>, + { + self.{{ field.local_name }} + {%- if field.is_optional %} + .get_or_insert(None) + {%- endif %} + {%- if is_opt %} + .get_or_insert(None) + {%- endif %} + .get_or_insert_with(BTreeMap::new) + .extend(iter.map(|(k, v)| (k.into(), v.into()))); + self + } +{%- endmacro %} + +{%- macro sdk_builder_setter_btreeset(field) %} + {%- set is_opt = False if field.data_type.__class__.__name__ != "Option" else True %} + {%- set dt = field.data_type if not is_opt else field.data_type.item_type %} + {{ docstring(field.description, indent=4) }} + pub fn {{ field.local_name }}<I, T>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = T>, + T: Into<{{ dt.item_type.type_hint }}>, + { + self.{{ field.local_name }} + .get_or_insert_with(BTreeSet::new) + .extend(iter.map(Into::into)); + self + } +{%- endmacro %} + +{#- Create DeriveBuilder setter method #} +{%- macro sdk_builder_setter(field) %} + {%- set dtc = field.data_type.__class__.__name__ %} + {%- set subdtc = field.data_type.item_type.__class__.__name__ %} + {%- if dtc == "BTreeMap" or subdtc == "BTreeMap" %} + {{ sdk_builder_setter_btreemap(field) }} + {%- elif dtc == "BTreeSet" or subdtc == "BTreeSet" %} + {{ sdk_builder_setter_btreeset(field) }} + {%- endif %} +{%- endmacro %} + +{%- macro wrap_optional(val, is_nullable) %} +{%- if is_nullable is defined and is_nullable -%} +Some({{ val }}) +{%- else -%} +{{ val }} +{%- endif -%} +{%- endmacro %} + +{#- Macros to render setting Request data from CLI input #} +{%- macro set_request_data_from_input(dst_var, param, val_var) %} +{%- set is_nullable = param.is_nullable if param.is_nullable is defined else False %} +{%- if param.type_hint in ["Option<Option<bool>>", 
"Option<Option<i32>>", "Opeion<Option<i64>>"] %} + {{ dst_var }}.{{ param.remote_name }}({{ "*" + val_var }}); +{%- elif param.type_hint in ["Option<i32>", "Option<i64>", "Option<f32>", "Option<f64>", "Option<bool>"] %} + {{ dst_var }}.{{ param.remote_name }}({{ "*" + val_var }}); +{%- elif param.type_hint in ["i32", "i64", "f32", "f64", "bool"] %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var | replace("&", "" )}}); +{%- elif param.data_type.__class__.__name__ in ["ArrayInput"] %} + {{ sdk_plain_array_setter(param, val_var.replace("&", ""), dst_var) }} +{%- elif param.data_type.__class__.__name__ in ["JsonValue"] %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var | replace("&", "" )}}.clone()); +{%- elif param.data_type.__class__.__name__ == "DictionaryInput" %} + {%- if param.data_type.value_type.__class__.__name__ == "Option" %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var | replace("&", "") }}.iter().cloned().map(|(k, v)| (k, v.map(Into::into)))); + {%- else %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var }}.iter().cloned()); + {%- endif %} +{%- elif param.data_type.__class__.__name__ == "StringEnum" %} + let tmp = match {{ val_var }} { + {%- for variant in param.data_type.variants.keys() | sort %} + {#- normally we should use the cli enum name, but we don't have it here and names must match anyway#} + {{ param.data_type.name }}::{{ variant }} => { + {{ sdk_mod_path[-1] }}::{{ param.data_type.name }}::{{ variant }} + } + {%- endfor %} + }; + {{ dst_var }}.{{ param.remote_name }}(tmp); +{%- elif param.data_type.__class__.__name__ == "EnumGroupStruct" %} +{#- This is a reverse action of Complex SDK enum being converted into the CLI group #} + {%- for k, v in param.data_type.fields.items() %} + {%- if v.data_type.__class__.__name__ in ["Boolean", "BooleanFlag"] %} + if {{ val_var | replace("&", "") }}.{{ v.local_name }} { + {{ dst_var }}.{{ param.remote_name }}( + {{ sdk_mod_path[-1] }}::{{ v.sdk_parent_enum_variant }}( + {{ sdk_mod_path[-1] 
}}::{{ v.remote_name }} + ) + ); + } + {%- elif v.data_type.__class__.__name__ == "ArrayInput" %} + {% set original_type = v.data_type.original_item_type %} + if let Some(data) = {{ val_var }}.{{ v.local_name }} { + {{ sdk_enum_array_setter(param, v, "data", dst_var) }} + } + {%- endif %} + {%- endfor %} +{%- elif param.data_type.__class__.__name__ == "StructInput" %} + {% set builder_name = param.local_name + "_builder" %} + let mut {{ builder_name }} = {{ sdk_mod_path[-1] }}::{{ param.data_type.name }}Builder::default(); + {%- for k, v in param.data_type.fields.items() %} + {%- if v.is_optional %} + if let Some(val) = &{{ val_var }}.{{ v.local_name }} { + {{ set_request_data_from_input(builder_name, v, "val") }} + } + {%- else %} + {{ set_request_data_from_input(builder_name, v, "&" + val_var + "." + v.local_name) }} + {%- endif %} + + {%- endfor %} + {{ dst_var }}.{{ param.remote_name }}({{ builder_name }}.build().expect("A valid object")); +{%- elif param.data_type.__class__.__name__ == "String" %} + {%- if is_nullable and not param.is_optional %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var | replace("&", "") }}.clone()); + {%- elif is_nullable and param.is_optional %} + {{ dst_var }}.{{ param.remote_name }}(Some({{ val_var }}.into())); + {%- elif (param.is_optional is defined and param.is_optional) or (param.is_required is defined and not param.is_required) %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var }}); + {%- else %} + {{ dst_var }}.{{ param.remote_name }}(&{{ val_var | replace("&", "") }}); + {%- endif %} +{%- elif param.data_type.__class__.__name__ == "Option" %} + {%- if param.data_type.item_type.__class__.__name__ == "StructInput" %} + if let Some(l{{ param.local_name }}) = &{{ val_var }} { + {% set builder_name = param.local_name + "_builder" %} + let mut {{ builder_name }} = {{ sdk_mod_path[-1] }}::{{ param.data_type.item_type.name }}Builder::default(); + {%- for k, v in param.data_type.item_type.fields.items() %} + {%- if 
v.is_optional %} + if let Some(val) = &l{{ param.local_name }}.{{ v.local_name }} { + {{ set_request_data_from_input(builder_name, v, "val") }} + } + {%- else %} + {{ set_request_data_from_input(builder_name, v, "&l" + param.local_name + "." + v.local_name) }} + {%- endif %} + + {%- endfor %} + {{ dst_var }}.{{ param.remote_name }}({{ builder_name }}.build().expect("A valid object")); + } + + {%- else %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var }}.clone().map(|v| v.into())); + {%- endif %} +{%- else %} + {{ dst_var }}.{{ param.remote_name }}({{ val_var }}); +{%- endif %} +{%- endmacro %} + +{%- macro sdk_enum_array_setter(param, field, val_var, dst_var) %} + {%- set original_type = field.data_type.original_data_type %} + {%- if field.data_type.item_type.__class__.__name__ == "JsonValue" and original_type.__class__.__name__ == "StructInput" %} + {% set builder_name = param.local_name + "_builder" %} + let {{ builder_name }}: Vec<{{ sdk_mod_path[-1] }}::{{ original_type.name }}> = {{ val_var }} + .iter() + .flat_map(|v| + serde_json::from_value::<{{ sdk_mod_path[-1] }}::{{ original_type.name }}>(v.to_owned())) + .collect(); + {{ dst_var }}.{{ param.remote_name }}( + {{ sdk_mod_path[-1] }}::{{ field.remote_name }}({{ builder_name }}) + ); + {%- else %} + {#- Normal array #} + {{ dst_var }}.{{ param.remote_name }}( + {{ sdk_mod_path[-1] }}::{{ field.remote_name }}({{ val_var }}.into_iter()) + ); + {%- endif %} +{%- endmacro %} + +{%- macro sdk_plain_array_setter(param, val_var, dst_var) %} + {%- set original_type = param.data_type.original_data_type %} + {%- set original_item_type = param.data_type.item_type.original_data_type %} + {%- if param.data_type.item_type.__class__.__name__ == "JsonValue" and original_type.__class__.__name__ == "StructInput" %} + {% set builder_name = param.local_name + "_builder" %} + let {{ builder_name }}: Vec<{{ sdk_mod_path[-1] }}::{{ original_type.name }}> = {{ val_var }} + .iter() + .flat_map(|v| + serde_json::from_value::<{{ 
sdk_mod_path[-1] }}::{{ original_type.name }}>(v.to_owned())) + .collect::<Vec<{{ sdk_mod_path[-1] }}:: {{ original_type.name }}>>(); + {{ dst_var }}.{{ param.remote_name }}({{ builder_name }}); + {%- elif param.data_type.item_type.__class__.__name__ == "String" and original_item_type.__class__.__name__ == "StructInput" %} + {#- Single field structure replaced with only string #} + {%- set original_type = param.data_type.item_type.original_data_type %} + {%- set original_field = original_type.fields[param.data_type.item_type.original_data_type.fields.keys()|list|first] %} + {% set builder_name = param.local_name + "_builder" %} + let {{ builder_name }}: Vec<{{ sdk_mod_path[-1] }}::{{ original_type.name }}> = {{ val_var }} + .iter() + .flat_map(|v| {{ sdk_mod_path[-1] }}::{{ original_type.name }}Builder::default() + .{{ original_field.remote_name }}(v) + .build() + ) + .collect(); + {{ dst_var }}.{{ param.remote_name }}({{ builder_name }}); + {%- elif param.data_type.item_type.__class__.__name__ == "String" and original_type.__class__.__name__ == "ArrayInput" %} + {#- Single field structure replaced with only string #} + {{ dst_var }}.{{ param.remote_name }}( + val.iter() + .cloned() + .map(|x| Vec::from([x.split(",").collect()])) + .collect::<Vec<_>>(), + ); + {%- elif param["setter_type"] is defined %} + {#- Param with setter present #} + {{ dst_var }}.{{ param.remote_name }}( + {{ val_var }}.iter() + ); + {%- elif original_item_type and original_item_type.__class__.__name__ == "DictionaryInput" %} + use std::collections::BTreeMap; + {{ dst_var }}.{{ param.remote_name }}( + {{ val_var }}.iter() + .map( |v| { + v.as_object() + .expect("Is a valid Json object") + .iter() + .map(|(k, v)| (k.clone().into(), v.clone().into())) + .collect::<BTreeMap<_,Value>>() + }) + .collect::<Vec<_>>() + ); + {%- else %} + {#- Normal array #} + {{ dst_var }}.{{ param.remote_name }}( + {{ val_var }}.iter().map(|v| v.into()).collect::<Vec<_>>() + ); + {%- endif %} +{%- endmacro %} + + 
+{%- macro get_data_for_sdk(data, data_var) %} +{%- if data.__class__.__name__ == "ArrayInput" -%} + {{ data_var }}.iter().cloned() +{%- endif -%} +{%- endmacro %} + +{%- macro set_cli_path_parameters(type_manager, builder, find_mode=False) %} +{%- if not find_mode %} + + // Set path parameters +{%- endif %} +{%- for (k, v) in type_manager.get_parameters("path") %} +{%- if not v.is_required %} + {%- if k != "project_id" %} + if let Some(val) = &self.path.{{ v.local_name }} { + {{ builder }}.{{ v.local_name }}(val); + } + {%- else %} + if let Some(val) = &self.path.{{ v.local_name }} { + {{ builder }}.{{ v.local_name }}(val); + } else { + {{ builder }}.{{ v.local_name }}(client.get_current_project().expect("Project ID must be known").id); + } + {%- endif %} +{%- elif not find_mode and find_present and operation_type in ["show", "set", "download"] %} + let resource_id = find_data["id"] + .as_str() + .expect("Resource ID is a string") + .to_string(); + {{ builder }}.{{ v.local_name }}(resource_id.clone()); +{%- else %} + {{ builder }}.{{ v.local_name }}(&self.path.{{ v.local_name }}); +{%- endif %} +{%- endfor %} +{%- endmacro %} + +{%- macro sdk_build_url(url, params, prefix = "", from_attr = False) %} + {%- if params.values()|selectattr("location", "equalto", "path")|list|length > 0 -%} + format!( + "{{ prefix }}{{ url }}", + {%- for k, v in params.items() %} + {%- if v.location == "path" %} + {{ k }} = {{ ('"' + k + '"') if not from_attr else ('self.' + v.local_name + '.as_ref()') }}, + {%- endif %} + {%- endfor %} + ) + {%- else %} + "{{ prefix }}{{ url }}".to_string() + {%- endif %} +{%- endmacro %} diff --git a/codegenerator/templates/rust_sdk/find.rs.j2 b/codegenerator/templates/rust_sdk/find.rs.j2 new file mode 100644 index 0000000..1143f7c --- /dev/null +++ b/codegenerator/templates/rust_sdk/find.rs.j2 @@ -0,0 +1,146 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// SPDX-License-Identifier: Apache-2.0 +// +// WARNING: This file is automatically generated from OpenAPI schema using +// `openstack-codegenerator`. +{% import 'rust_macros.j2' as macros with context -%} +use derive_builder::Builder; +use http::{HeaderMap, HeaderName, HeaderValue}; + +use crate::api::find::Findable; +use crate::api::rest_endpoint_prelude::*; +{%- if not name_filter_supported %} +use crate::api::{ApiError, RestClient}; +use tracing::trace; +{%- endif %} + +use crate::api::{{ mod_path | join("::") | replace("::type", "::r#type") }}::{ + get as Get, + {{ list_mod }} as List, +}; + +/// Find for {{ resource_name }} by {{ name_field }}OrId. +#[derive(Debug, Builder, Clone)] +#[builder(setter(strip_option))] +pub struct Request<'a> { + #[builder(setter(into), default)] + id: Cow<'a, str>, + +{%- for k, param in type_manager.parameters.items() %} + {{ param.builder_macros }} + {{ param.local_name }}: {{ param.type_hint }}, +{%- endfor %} + + #[builder(setter(name = "_headers"), default, private)] + _headers: Option<HeaderMap>, +} + +impl<'a> Request<'a> { + /// Create a builder for the endpoint. + pub fn builder() -> RequestBuilder<'a> { + RequestBuilder::default() + } +} + +impl<'a> RequestBuilder<'a> { + /// Add a single header to the Volume. + pub fn header(&mut self, header_name: &'static str, header_value: &'static str) -> &mut Self +where { + self._headers + .get_or_insert(None) + .get_or_insert_with(HeaderMap::new) + .insert(header_name, HeaderValue::from_static(header_value)); + self + } + + /// Add multiple headers. 
+ pub fn headers<I, T>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = T>, + T: Into<(Option<HeaderName>, HeaderValue)>, + { + self._headers + .get_or_insert(None) + .get_or_insert_with(HeaderMap::new) + .extend(iter.map(Into::into)); + self + } +} + +impl<'a> Findable for Request<'a> { + type G = Get::Request<'a>; + type L = List::Request{{ list_lifetime }}; + fn get_ep(&self) -> Get::Request<'a> { + let mut ep = Get::Request::builder(); + ep.id(self.id.clone()); +{%- for k, param in type_manager.parameters.items() %} + ep.{{ param.remote_name }}(self.{{ param.local_name}}.clone()); +{%- endfor %} + if let Some(headers) = &self._headers { + ep.headers(headers.iter().map(|(k, v)| (Some(k.clone()), v.clone()))); + } + ep.build().unwrap() + } + + {%- if name_filter_supported %} + fn list_ep(&self) -> List::Request{{ list_lifetime }} { + let mut ep = List::Request::builder(); +{%- for k, param in type_manager.parameters.items() %} + ep.{{ param.remote_name }}(self.{{ param.local_name}}.clone()); +{%- endfor %} + if let Some(headers) = &self._headers { + ep.headers(headers.iter().map(|(k, v)| (Some(k.clone()), v.clone()))); + } + ep.{{ name_field }}(self.id.clone()); + ep.build().unwrap() + } + {%- else %} + fn list_ep(&self) -> List::Request{{ list_lifetime }} { + let mut ep = List::Request::builder(); +{%- for k, param in type_manager.parameters.items() %} + ep.{{ param.remote_name }}(self.{{ param.local_name}}.clone()); +{%- endfor %} + if let Some(headers) = &self._headers { + ep.headers(headers.iter().map(|(k, v)| (Some(k.clone()), v.clone()))); + } + ep.build().unwrap() + } + {%- endif %} + {%- if not name_filter_supported %} + /// Locate {{ resource_name }} in a list + fn locate_resource_in_list<C: RestClient>( + &self, + data: Vec<serde_json::Value>, + ) -> Result<serde_json::Value, ApiError<C::Error>> { + // {{ resource_name }} is not supporting name as query parameter to the list. 
+ // Therefore it is necessary to go through complete list of results. + let mut maybe_result: Option<serde_json::Value> = None; + for item in data.iter() { + trace!("Validate item {:?} is what we search for", item); + if let Some(name_as_val) = item.get("{{ name_field }}") { + if let Some(name) = name_as_val.as_str() { + if name == self.id { + if maybe_result.is_none() { + maybe_result = Some(item.clone()); + } else { + return Err(ApiError::IdNotUnique); + } + } + } + } + } + maybe_result.ok_or(ApiError::ResourceNotFound) + } + {%- endif %} +} diff --git a/codegenerator/templates/rust_sdk/impl.rs.j2 b/codegenerator/templates/rust_sdk/impl.rs.j2 new file mode 100644 index 0000000..48d7cc4 --- /dev/null +++ b/codegenerator/templates/rust_sdk/impl.rs.j2 @@ -0,0 +1,388 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// SPDX-License-Identifier: Apache-2.0 +// +// WARNING: This file is automatically generated from OpenAPI schema using +// `openstack-codegenerator`. 
+{% import 'rust_macros.j2' as macros with context -%} +{{ macros.mod_docstring(command_description) }} +use derive_builder::Builder; +use http::{HeaderMap, HeaderName, HeaderValue}; + +use crate::api::rest_endpoint_prelude::*; + +{% for mod in type_manager.get_imports() | sort %} +use {{ mod }}; +{%- endfor %} + +{% if is_json_patch %} +use json_patch::Patch; +{%- endif %} + +{%- if operation_type == "list" and "limit" in type_manager.parameters.keys() or "marker" in type_manager.parameters.keys() %} +use crate::api::Pageable; +{%- endif %} + +{%- include "rust_sdk/subtypes.j2" %} + +{%- include "rust_sdk/request_struct.j2" %} + +{%- with request = type_manager.get_root_data_type() %} +impl{{ type_manager.get_request_static_lifetimes(request) }} Request{{ type_manager.get_request_static_lifetimes(request) }} { + /// Create a builder for the endpoint. + pub fn builder() -> RequestBuilder{{ type_manager.get_request_static_lifetimes(request) }} { + RequestBuilder::default() + } +} + +impl{{ type_manager.get_request_static_lifetimes(request) }} RequestBuilder{{ type_manager.get_request_static_lifetimes(request) }} { + +{%- for param in type_manager.parameters.values() %} +{%- if param.setter_type == "csv" %} + {{ macros.docstring(param.description, indent=4) }} + pub fn {{param.setter_name}}<I, T>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = T>, + T: Into<{{ param.data_type.item_type.type_hint }}>, + { + self.{{param.setter_name}} + .get_or_insert(None) + .get_or_insert_with(CommaSeparatedList::new) + .extend(iter.map(Into::into)); + self + } +{%- elif param.setter_type=="set" %} + {{ macros.docstring(param.description, indent=4) }} + pub fn {{param.setter_name}}<I, T>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = T>, + T: Into<{{ param.data_type.item_type.type_hint }}>, + { + self.{{ param.setter_name }} + .get_or_insert_with(BTreeSet::new) + .extend(iter.map(Into::into)); + self + } +{%- elif param.setter_type == "list" %} + {{ 
macros.docstring(param.description, indent=4) }} + pub fn {{param.setter_name}}<I, T>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = T>, + T: Into<{{ param.data_type.item_type.type_hint }}>, + { + self.{{param.setter_name}} + {%- if not param.is_required %} + .get_or_insert(None) + {%- endif %} + .get_or_insert_with(Vec::new) + .extend(iter.map(Into::into)); + self + } +{%- endif %} +{%- endfor %} + +{%- if request.fields is defined %} + {%- for field in request.fields.values() %} + {%- if "private" in field.builder_macros %} + {{ macros.sdk_builder_setter(field)}} + {%- endif %} + {%- endfor %} +{%- endif %} + + /// Add a single header to the {{ class_name }}. + pub fn header(&mut self, header_name: &'static str, header_value: &'static str) -> &mut Self + where { + self._headers + .get_or_insert(None) + .get_or_insert_with(HeaderMap::new) + .insert(header_name, HeaderValue::from_static(header_value)); + self + } + + /// Add multiple headers. + pub fn headers<I, T>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = T>, + T: Into<(Option<HeaderName>, HeaderValue)>, + { + self._headers + .get_or_insert(None) + .get_or_insert_with(HeaderMap::new) + .extend(iter.map(Into::into)); + self + } + + {% if request.additional_fields_type is defined and request.additional_fields_type %} + pub fn properties<I, K, V>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = (K, V)>, + K: Into<Cow<'a, str>>, + V: Into<{{ request.additional_fields_type.type_hint }}>, + { + self._properties + .get_or_insert_with(BTreeMap::new) + .extend(iter.map(|(k, v)| (k.into(), v.into()))); + self + } + + {%- endif %} + + {%- if request.__class__.__name__ == "BTreeMap" %} + pub fn properties<I, K, V>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = (K, V)>, + K: Into<Cow<'a, str>>, + V: Into<{{ request.value_type.type_hint }}>, + { + self._properties + .get_or_insert_with(BTreeMap::new) + .extend(iter.map(|(k, v)| (k.into(), v.into()))); + self + } + + 
{%- endif %} +} + +impl{{ type_manager.get_request_static_lifetimes(request) }} RestEndpoint for Request{{ type_manager.get_request_static_lifetimes(request) }} { + fn method(&self) -> http::Method { + http::Method::{{ method | upper }} + } + + fn endpoint(&self) -> Cow<'static, str> { + {{ macros.sdk_build_url(url, type_manager.parameters, "", true) }}.into() + } + + fn parameters(&self) -> QueryParams { + {%- if type_manager.parameters.values()|selectattr("location", "equalto", "query")|list|length > 0 %} + let mut params = QueryParams::default(); + + {%- for param in type_manager.parameters.values() %} + {%- if param.location == "query" %} + {%- if param.data_type.__class__.__name__ == "Null" %} + params.push_opt_key_only( + "{{ param.remote_name }}", + self.{{ param.local_name }}.as_ref() + ); + {%- elif not param.type_hint.startswith("BTreeSet") %} + params.push_opt( + "{{ param.remote_name }}", + self.{{ param.local_name}} + {%- if "Cow<" in param.type_hint %} + .as_ref() + {%- endif %} + ); + {%- else %} + params.extend(self.{{ param.local_name }}.iter().map(|value| ("{{ param.remote_name }}", value))); + {%- endif %} + {%- endif %} + {%- endfor %} + + params + {%- else %} + QueryParams::default() + {%- endif %} + } + +{% if request.fields is defined and request.fields %} + fn body(&self) -> Result<Option<(&'static str, Vec<u8>)>, BodyError> { + let mut params = JsonBodyParams::default(); + + {% for k, v in request.fields.items() %} + {%- if v.data_type.__class__.__name__ != "Null" %} + {%- if v.is_optional %} + if let Some(val) = &self.{{ v.local_name }} { + params.push("{{ k }}", serde_json::to_value(val)?); + } + {%- else %} + params.push("{{ k }}", serde_json::to_value(&self.{{v.local_name}})?); + {%- endif %} + {%- else %} + params.push("{{ k }}", Value::Null); + {%- endif %} + {%- endfor %} + + {%- if request.additional_fields_type %} + for (key, val) in &self._properties { + params.push(key.clone(), serde_json::Value::from(val.clone())); + } + {%- 
endif %} + + params.into_body() + } +{% elif is_json_patch %} + fn body(&self) -> Result<Option<(&'static str, Vec<u8>)>, BodyError> { + Ok(Some(( + "{{ mime_type }}", + serde_json::to_string(&self.patch)?.into_bytes(), + ))) + } + +{%- elif request.__class__.__name__ == "BTreeMap" %} + fn body(&self) -> Result<Option<(&'static str, Vec<u8>)>, BodyError> { + let mut params = JsonBodyParams::default(); + + for (key, val) in &self._properties { + params.push(key.clone(), serde_json::Value::from(val.clone())); + } + + params.into_body() + } +{%- endif %} + + fn service_type(&self) -> ServiceType { + ServiceType::{{ sdk_service_name }} + } + + fn response_key(&self) -> Option<Cow<'static, str>> { +{%- if response_key %} + Some("{{ response_key }}".into()) +{%- else %} + None +{%- endif %} + } + +{% if response_list_item_key %} + fn response_list_item_key(&self) -> Option<Cow<'static, str>> { + Some("{{ response_list_item_key }}".into()) + } +{%- endif %} + + /// Returns headers to be set into the request + fn request_headers(&self) -> Option<&HeaderMap> { + self._headers.as_ref() + } +} + +{#- EP is pageable if operation_type is list and there is limit or marker query parameter #} +{%- if operation_type == "list" and "limit" in type_manager.parameters.keys() or "marker" in type_manager.parameters.keys() %} +impl{{ type_manager.get_request_static_lifetimes(request) }} Pageable for Request{{ type_manager.get_request_static_lifetimes(request) }} {} +{%- endif %} + +#[cfg(test)] +mod tests { + #![allow(unused_imports)] + use super::*; +{%- if method.upper() == "HEAD" %} + use crate::api::RawQuery; +{%- else %} + use crate::api::Query; + use serde_json::json; +{%- endif %} + use crate::types::ServiceType; + use crate::test::client::MockServerClient; + use http::{HeaderName, HeaderValue}; + {%- if is_json_patch %} + use serde_json::from_value; + use json_patch::Patch; + {%- endif %} + + #[test] + fn test_service_type() { + assert_eq!(Request::builder() + {{ 
".patch(from_value::<Patch>(json!([])).unwrap())" if is_json_patch else "" }} + {{ request.get_mandatory_init() }} + .build().unwrap().service_type(), ServiceType::{{ sdk_service_name }}); + } + + #[test] + fn test_response_key() { +{%- if response_key %} + assert_eq!( + Request::builder() + {{ ".patch(from_value::<Patch>(json!([])).unwrap())" if is_json_patch else "" }} + {{ request.get_mandatory_init() }} + .build().unwrap().response_key().unwrap(), + "{{ response_key }}" + ); +{%- else %} + assert!(Request::builder() + {{ ".patch(from_value::<Patch>(json!([])).unwrap())" if is_json_patch else "" }} + {{ request.get_mandatory_init() }} + .build().unwrap().response_key().is_none()) +{%- endif %} + } + + #[test] + fn endpoint() { + let client = MockServerClient::new(); + let mock = client.server.mock(|when, then| { + when.method(httpmock::Method::{{ method | upper }}) + .path({{ macros.sdk_build_url(url, type_manager.parameters, "/") }}); + + then.status(200) + .header("content-type", "application/json") +{%- if method.upper() != "HEAD" %} + .json_body(json!({ "{{ response_key or "dummy" }}": {} })) +{%- endif %}; + }); + + let endpoint = Request::builder() + {%- for k, v in type_manager.parameters.items() %} + {%- if v.location == "path" %} + .{{ v.local_name }}("{{ k }}") + {%- endif %} + {%- endfor %} + {{ ".patch(from_value::<Patch>(json!([])).unwrap())" if is_json_patch else "" }} + {{ request.get_mandatory_init() }} + .build().unwrap(); +{%- if method.upper() != "HEAD" %} + let _: serde_json::Value = endpoint.query(&client).unwrap(); +{%- else %} + let _ = endpoint.raw_query(&client).unwrap(); +{%- endif %} + mock.assert(); + } + + #[test] + fn endpoint_headers() { + let client = MockServerClient::new(); + let mock = client.server.mock(|when, then| { + when.method(httpmock::Method::{{ method|upper }}) + .path({{ macros.sdk_build_url(url, type_manager.parameters, "/") }}) + .header("foo", "bar") + .header("not_foo", "not_bar"); + then.status(200) + 
.header("content-type", "application/json") +{%- if method.upper() != "HEAD" %} + .json_body(json!({ "{{ response_key or "dummy" }}": {} })) +{%- endif %}; + }); + + let endpoint = Request::builder() + {%- for k, v in type_manager.parameters.items() %} + {%- if v.location == "path" %} + .{{ v.local_name }}("{{ k }}") + {%- endif %} + {%- endfor %} + {{ ".patch(from_value::<Patch>(json!([])).unwrap())" if is_json_patch else "" }} + {{ request.get_mandatory_init() }} + .headers( + [( + Some(HeaderName::from_static("foo")), + HeaderValue::from_static("bar"), + )] + .into_iter(), + ) + .header("not_foo", "not_bar") + .build() + .unwrap(); +{%- if method.upper() != "HEAD" %} + let _: serde_json::Value = endpoint.query(&client).unwrap(); +{%- else %} + let _ = endpoint.raw_query(&client).unwrap(); +{%- endif %} + mock.assert(); + } +} + +{%- endwith %} diff --git a/codegenerator/templates/rust_sdk/mod.rs.j2 b/codegenerator/templates/rust_sdk/mod.rs.j2 new file mode 100644 index 0000000..f161d9a --- /dev/null +++ b/codegenerator/templates/rust_sdk/mod.rs.j2 @@ -0,0 +1,30 @@ +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// +// SPDX-License-Identifier: Apache-2.0 +// +// WARNING: This file is automatically generated from OpenAPI schema using +// `openstack-codegenerator`. +{% if mod_path|length > 2 %} +//! `{{ url }}` REST operations of {{ service_name }} +{%- else %} +//! 
`{{ service_name|capitalize }}` Service bindings +{%- endif %} + +{%- for mod in mod_list|sort %} +{%- if mod in ["type"] %} +pub mod r#{{ mod }}; +{%- else %} +pub mod {{ mod }}; +{%- endif %} +{%- endfor %} + diff --git a/codegenerator/templates/rust_sdk/request_struct.j2 b/codegenerator/templates/rust_sdk/request_struct.j2 new file mode 100644 index 0000000..f28805f --- /dev/null +++ b/codegenerator/templates/rust_sdk/request_struct.j2 @@ -0,0 +1,57 @@ +{#- Template to represent the root request structure -#} +{%- import 'rust_macros.j2' as macros with context -%} +{%- with data_type = type_manager.get_root_data_type() %} + +{%- if data_type.__class__.__name__ == "Struct" %} +#[derive(Builder, Debug, Clone)] +#[builder(setter(strip_option))] +pub struct {{ data_type.name }}{{ type_manager.get_request_static_lifetimes(data_type) }} { + {%- for field in data_type.fields.values() %} + {%- if field.data_type.__class__.__name__ != "Null" %} + {{ macros.docstring(field.description, indent=4) }} + {{ field.builder_macros }} + pub(crate) {{ field.local_name }}: {{ field.type_hint }}, + {%- endif %} + {%- endfor %} + +{%- for k, param in type_manager.parameters.items() %} + {{ macros.docstring(param.description, indent=4) }} + {{ param.builder_macros }} + {{ param.local_name }}: {{ param.type_hint }}, +{%- endfor %} + +{% if is_json_patch %} + /// Patch data + #[builder()] + patch: Patch, +{%- endif %} + + #[builder(setter(name = "_headers"), default, private)] + _headers: Option<HeaderMap>, + + {%- if data_type.additional_fields_type %} + #[builder(setter(name = "_properties"), default, private)] + _properties: BTreeMap<Cow<'a, str>, {{ data_type.additional_fields_type.type_hint }}>, + {%- endif %} +} + +{%- elif data_type.__class__.__name__ == "BTreeMap" %} +#[derive(Builder, Debug, Clone)] +#[builder(setter(strip_option))] +pub struct Request{{ type_manager.get_request_static_lifetimes(data_type) }} { +{%- for k, param in type_manager.parameters.items() %} + {{ 
macros.docstring(param.description, indent=4) }} + {{ param.builder_macros }} + {{ param.local_name }}: {{ param.type_hint }}, +{%- endfor %} + + #[builder(setter(name = "_headers"), default, private)] + _headers: Option<HeaderMap>, + + #[builder(setter(name = "_properties"), default, private)] + _properties: BTreeMap<Cow<'a, str>, {{ data_type.value_type.type_hint }}>, +} + + +{%- endif %} +{%- endwith %} diff --git a/codegenerator/templates/rust_sdk/subtypes.j2 b/codegenerator/templates/rust_sdk/subtypes.j2 new file mode 100644 index 0000000..4e54e68 --- /dev/null +++ b/codegenerator/templates/rust_sdk/subtypes.j2 @@ -0,0 +1,70 @@ +{%- import 'rust_macros.j2' as macros with context -%} +{%- for subtype in type_manager.get_subtypes() %} +{{ macros.docstring(subtype.description, indent=0) }} +{%- if subtype.derive_container_macros %} +{{ subtype.derive_container_macros }} +{%- endif %} +{%- if subtype.builder_container_macros %} +{{ subtype.builder_container_macros }} +{%- endif %} +{%- if subtype.serde_container_macros %} +{{ subtype.serde_container_macros }} +{%- endif %} +pub {{ subtype.base_type }} {{ subtype.name }}{{ ("<" + ",".join(subtype.lifetimes) + ">") if subtype.lifetimes else ""}} { + {%- if subtype["fields"] is defined %} + {#- Struct fields #} + {%- for field in subtype.fields.values() %} + {{ macros.docstring(field.description, indent=4) }} + {{ field.serde_macros }} + {{ field.builder_macros }} + pub(crate) {{ field.local_name }}: {{ field.type_hint }}, + {%- endfor %} + {%- elif subtype["kinds"] is defined %} + {#- Enum kinds #} + {%- for name, kind in subtype.kinds | dictsort %} + {{- macros.docstring(kind.description, indent=4) }} + {{ name }}({{ kind.type_hint }}), + {%- endfor %} + {%- elif subtype["variants"] is defined %} + {#- StringEnum kinds #} + {%- for k, v in subtype.variants | dictsort %} + {{ subtype.variant_serde_macros(k) }} + {{ k }}, + {%- endfor %} + {%- endif %} + + {%- if subtype.base_type == "struct" and 
subtype.additional_fields_type %} + + #[builder(setter(name = "_properties"), default, private)] + #[serde(flatten)] + _properties: BTreeMap<Cow<'a, str>, {{ subtype.additional_fields_type.type_hint }}>, + {%- endif %} +} + +{% if type_manager.subtype_requires_private_builders(subtype) %} +impl{{ ("<" + ",".join(subtype.lifetimes) + ">") if subtype.lifetimes else ""}} {{ subtype.name }}Builder{{ ("<" + ",".join(subtype.lifetimes) + ">") if subtype.lifetimes else ""}} { + + {%- for field in subtype.fields.values() %} + {%- if "private" in field.builder_macros %} + {{ macros.sdk_builder_setter(field)}} + {%- endif %} + {%- endfor %} + + {% if subtype.additional_fields_type is defined and subtype.additional_fields_type %} + pub fn properties<I, K, V>(&mut self, iter: I) -> &mut Self + where + I: Iterator<Item = (K, V)>, + K: Into<Cow<'a, str>>, + V: Into<{{ subtype.additional_fields_type.type_hint }}>, + { + self._properties + .get_or_insert_with(BTreeMap::new) + .extend(iter.map(|(k, v)| (k.into(), v.into()))); + self + } + + {%- endif %} + +} +{% endif %} +{%- endfor %} diff --git a/codegenerator/templates/sdk/impl_schema.py.j2 b/codegenerator/templates/sdk/impl_schema.py.j2 new file mode 100644 index 0000000..0c09909 --- /dev/null +++ b/codegenerator/templates/sdk/impl_schema.py.j2 @@ -0,0 +1,24 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +# ATTENTION: this file is generated by the code generator + +{%- if srv_ver_mod %} +from {{ srv_ver_mod }}.schemas import {{ class_name | lower }}_openapi_schema +{%- endif %} + +{%- if schema is defined %} +SCHEMA = {{ schema }} +{%- elif spec is defined %} +SPEC = {{ spec }} +{%- endif %} diff --git a/codegenerator/tests/functional/__init__.py b/codegenerator/tests/functional/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/codegenerator/tests/functional/test_openapi_cinder.py b/codegenerator/tests/functional/test_openapi_cinder.py new file mode 100644 index 0000000..392fe50 --- /dev/null +++ b/codegenerator/tests/functional/test_openapi_cinder.py @@ -0,0 +1,37 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import tempfile +from pathlib import Path +from unittest import TestCase + + +class Args: + def __init__(self, validate: bool = False): + self.validate: bool = validate + self.api_ref_src: str | None = None + + +class TestGenerator(TestCase): + def test_generate(self): + from codegenerator.openapi import cinder + + generator = cinder.CinderV3Generator() + work_dir = tempfile.TemporaryDirectory() + + generator.generate(work_dir.name, Args(validate=True)) + + self.assertTrue( + Path( + work_dir.name, "openapi_specs", "block-storage", "v3.yaml" + ).exists() + ) diff --git a/codegenerator/tests/functional/test_openapi_glance.py b/codegenerator/tests/functional/test_openapi_glance.py new file mode 100644 index 0000000..8ee4695 --- /dev/null +++ b/codegenerator/tests/functional/test_openapi_glance.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import tempfile +from pathlib import Path +from unittest import TestCase + + +class Args: + def __init__(self, validate: bool = False): + self.validate: bool = validate + self.api_ref_src: str | None = None + + +class TestGenerator(TestCase): + def test_generate(self): + from codegenerator.openapi import glance + + generator = glance.GlanceGenerator() + work_dir = tempfile.TemporaryDirectory() + + generator.generate(work_dir.name, Args(validate=True)) + + self.assertTrue( + Path(work_dir.name, "openapi_specs", "image", "v2.yaml").exists() + ) diff --git a/codegenerator/tests/functional/test_openapi_keystone.py b/codegenerator/tests/functional/test_openapi_keystone.py new file mode 100644 index 0000000..c6be527 --- /dev/null +++ b/codegenerator/tests/functional/test_openapi_keystone.py @@ -0,0 +1,37 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import tempfile +from pathlib import Path +from unittest import TestCase + + +class Args: + def __init__(self, validate: bool = False): + self.validate: bool = validate + self.api_ref_src: str | None = None + + +class TestGenerator(TestCase): + def test_generate(self): + from codegenerator.openapi import keystone + + generator = keystone.KeystoneGenerator() + work_dir = tempfile.TemporaryDirectory() + + generator.generate(work_dir.name, Args(validate=True)) + + self.assertTrue( + Path( + work_dir.name, "openapi_specs", "identity", "v3.yaml" + ).exists() + ) diff --git a/codegenerator/tests/functional/test_openapi_neutron.py b/codegenerator/tests/functional/test_openapi_neutron.py new file mode 100644 index 0000000..a839042 --- /dev/null +++ b/codegenerator/tests/functional/test_openapi_neutron.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import tempfile +from pathlib import Path +from unittest import TestCase + + +class Args: + def __init__(self, validate: bool = False): + self.validate: bool = validate + self.api_ref_src: str | None = None + + +class TestGenerator(TestCase): + def test_generate(self): + from codegenerator.openapi import neutron + + generator = neutron.NeutronGenerator() + work_dir = tempfile.TemporaryDirectory() + + generator.generate(work_dir.name, Args(validate=True)) + + self.assertTrue( + Path(work_dir.name, "openapi_specs", "network", "v2.yaml").exists() + ) diff --git a/codegenerator/tests/functional/test_openapi_nova.py b/codegenerator/tests/functional/test_openapi_nova.py new file mode 100644 index 0000000..4054b00 --- /dev/null +++ b/codegenerator/tests/functional/test_openapi_nova.py @@ -0,0 +1,35 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import tempfile +from pathlib import Path +from unittest import TestCase + + +class Args: + def __init__(self, validate: bool = False): + self.validate: bool = validate + self.api_ref_src: str | None = None + + +class TestGenerator(TestCase): + def test_generate(self): + from codegenerator.openapi import nova + + generator = nova.NovaGenerator() + work_dir = tempfile.TemporaryDirectory() + + generator.generate(work_dir.name, Args(validate=True)) + + self.assertTrue( + Path(work_dir.name, "openapi_specs", "compute", "v2.yaml").exists() + ) diff --git a/codegenerator/tests/functional/test_openapi_octavia.py b/codegenerator/tests/functional/test_openapi_octavia.py new file mode 100644 index 0000000..1d8cac5 --- /dev/null +++ b/codegenerator/tests/functional/test_openapi_octavia.py @@ -0,0 +1,37 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import tempfile +from pathlib import Path +from unittest import TestCase + + +class Args: + def __init__(self, validate: bool = False): + self.validate: bool = validate + self.api_ref_src: str | None = None + + +class TestGenerator(TestCase): + def test_generate(self): + from codegenerator.openapi import octavia + + generator = octavia.OctaviaGenerator() + work_dir = tempfile.TemporaryDirectory() + + generator.generate(work_dir.name, Args(validate=True)) + + self.assertTrue( + Path( + work_dir.name, "openapi_specs", "load-balancing", "v2.yaml" + ).exists() + ) diff --git a/codegenerator/tests/unit/__init__.py b/codegenerator/tests/unit/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/codegenerator/tests/unit/test_common.py b/codegenerator/tests/unit/test_common.py new file mode 100644 index 0000000..d186329 --- /dev/null +++ b/codegenerator/tests/unit/test_common.py @@ -0,0 +1,152 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +from unittest import TestCase + +from typing import Any + +from codegenerator import common + + +class TestFindResponseSchema(TestCase): + FOO = {"foo": {"type": "string"}} + + # def setUp(self): + # super().setUp() + # logging.basicConfig(level=logging.DEBUG) + + def test_find_with_single_candidate(self): + responses = { + "200": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": {**self.FOO}, + } + } + } + } + } + self.assertEqual( + responses["200"]["content"]["application/json"]["schema"], + common.find_response_schema(responses, "foo"), + ) + + def test_find_with_list(self): + responses = { + "200": { + "content": { + "application/json": { + "schema": { + "type": "object", + "properties": { + "foos": {"type": "array", "items": self.FOO} + }, + } + } + } + } + } + self.assertEqual( + responses["200"]["content"]["application/json"]["schema"], + common.find_response_schema(responses, "foo"), + ) + + def test_find_correct_action(self): + foo_action = { + "type": "string", + "x-openstack": {"action-name": "foo-action"}, + } + bar_action = { + "type": "string", + "x-openstack": {"action-name": "bar-action"}, + } + responses: dict[str, Any] = { + "200": { + "content": { + "application/json": { + "schema": {"type": "object", "properties": self.FOO} + } + } + }, + "204": { + "content": { + "application/json": { + "schema": {"oneOf": [foo_action, bar_action]} + } + } + }, + } + self.assertEqual( + foo_action, + common.find_response_schema(responses, "foo", "foo-action"), + ) + self.assertEqual( + bar_action, + common.find_response_schema(responses, "foo", "bar-action"), + ) + self.assertIsNone( + common.find_response_schema(responses, "foo", "baz-action"), + ) + self.assertEqual( + responses["200"]["content"]["application/json"]["schema"], + common.find_response_schema(responses, "foo"), + ) + + def test_no_candidates_returns_root(self): + responses = { + "200": { + "content": { + "application/json": { + "schema": 
self.FOO["foo"], + } + } + } + } + self.assertEqual( + responses["200"]["content"]["application/json"]["schema"], + common.find_response_schema(responses, "foo"), + ) + + def test_plural(self): + map = { + "policy": "policies", + "server": "servers", + "access": "accesses", + "bus": "buses", + "box": "boxes", + "buzz": "buzzes", + "wish": "wishes", + "clash": "clashes", + "potato": "potatoes", + "axis": "axes", + "elf": "elves", + "knife": "knives", + } + for singular, plural in map.items(): + self.assertEqual(plural, common.get_plural_form(singular)) + + def test_singular(self): + map = { + "policy": "policies", + "server": "servers", + "access": "accesses", + "bus": "buses", + "box": "boxes", + "buzz": "buzzes", + "wish": "wishes", + "clash": "clashes", + "potato": "potatoes", + } + for singular, plural in map.items(): + self.assertEqual(singular, common.get_singular_form(plural)) diff --git a/codegenerator/tests/unit/test_model.py b/codegenerator/tests/unit/test_model.py new file mode 100644 index 0000000..3f35c0e --- /dev/null +++ b/codegenerator/tests/unit/test_model.py @@ -0,0 +1,1330 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# +import logging +from unittest import TestCase + +from codegenerator import model + + +SAMPLE_SERVER_SCHEMA = { + "type": "object", + "properties": { + "server": { + "type": "object", + "description": "A `server` object.", + "properties": { + "name": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "format": "name", + "description": "dummy description", + }, + "imageRef": { + "oneOf": [ + {"type": "string", "format": "uuid"}, + {"type": "string", "maxLength": 0}, + ] + }, + "flavorRef": {"type": ["string", "integer"], "minLength": 1}, + "adminPass": {"type": "string"}, + "metadata": { + "type": "object", + "description": "metadata description", + "additionalProperties": False, + "patternProperties": { + "^[a-zA-Z0-9-_:. ]{1,255}$": { + "type": "string", + "maxLength": 255, + } + }, + }, + "networks": { + "oneOf": [ + { + "type": "array", + "items": { + "type": "object", + "properties": { + "fixed_ip": { + "type": "string", + "oneOf": [ + {"format": "ipv4"}, + {"format": "ipv6"}, + ], + }, + "port": { + "oneOf": [ + { + "type": "string", + "format": "uuid", + }, + {"type": "null"}, + ] + }, + "uuid": { + "type": "string", + "format": "uuid", + }, + "tag": { + "type": "string", + "minLength": 1, + "maxLength": 60, + "pattern": "^[^,/]*$", + }, + }, + "additionalProperties": False, + }, + }, + {"type": "string", "enum": ["none", "auto"]}, + ], + "description": "Networks description", + }, + "OS-DCF:diskConfig": { + "type": "string", + "enum": ["AUTO", "MANUAL"], + "description": "DiskConfig description", + }, + "accessIPv4": { + "type": "string", + "format": "ipv4", + "description": "IPv4 address", + }, + "availability_zone": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "format": "name", + "description": "A target cell name.", + }, + "block_device_mapping": { + "type": "array", + "items": { + "type": "object", + "properties": { + "virtual_name": { + "type": "string", + "maxLength": 255, + }, + "volume_id": {"type": "string", "format": 
"uuid"}, + "snapshot_id": { + "type": "string", + "format": "uuid", + }, + "volume_size": { + "type": ["integer", "string"], + "pattern": "^[0-9]+$", + "minimum": 1, + "maximum": 2147483647, + }, + "device_name": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "pattern": "^[a-zA-Z0-9._-r/]*$", + }, + "delete_on_termination": { + "type": ["boolean", "string"], + "enum": [ + True, + "True", + False, + "False", + ], + }, + "no_device": {}, + "connection_info": { + "type": "string", + "maxLength": 16777215, + }, + }, + "additionalProperties": False, + }, + }, + "block_device_mapping_v2": { + "type": "array", + "items": { + "type": "object", + "properties": { + "virtual_name": { + "type": "string", + "maxLength": 255, + }, + "volume_id": {"type": "string", "format": "uuid"}, + "snapshot_id": { + "type": "string", + "format": "uuid", + }, + "volume_size": { + "type": ["integer", "string"], + "pattern": "^[0-9]+$", + "minimum": 1, + "maximum": 2147483647, + }, + "device_name": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "pattern": "^[a-zA-Z0-9._-r/]*$", + }, + "delete_on_termination": { + "type": ["boolean", "string"], + "enum": [ + True, + "True", + False, + "False", + ], + }, + "no_device": {}, + "connection_info": { + "type": "string", + "maxLength": 16777215, + }, + "source_type": { + "type": "string", + "enum": [ + "volume", + "image", + "snapshot", + "blank", + ], + }, + "uuid": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "pattern": "^[a-zA-Z0-9._-]*$", + }, + "image_id": {"type": "string", "format": "uuid"}, + "destination_type": { + "type": "string", + "enum": ["local", "volume"], + }, + "guest_format": { + "type": "string", + "maxLength": 255, + }, + "device_type": { + "type": "string", + "maxLength": 255, + }, + "disk_bus": {"type": "string", "maxLength": 255}, + "boot_index": { + "type": ["integer", "string", "null"], + "pattern": "^-?[0-9]+$", + }, + "tag": { + "type": "string", + "minLength": 1, + "maxLength": 
60, + "pattern": "^[^,/]*$", + }, + "volume_type": { + "type": ["string", "null"], + "minLength": 0, + "maxLength": 255, + }, + }, + "additionalProperties": False, + }, + "description": "descr", + }, + "config_drive": { + "type": ["boolean", "string"], + "enum": ["No", "no", False], + }, + "min_count": { + "type": ["integer", "string"], + "pattern": "^[0-9]*$", + "minimum": 1, + "minLength": 1, + }, + "security_groups": { + "type": "array", + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "format": "name", + "description": "A target cell name. Schedule the server in a host in the cell specified.", + } + }, + "additionalProperties": False, + }, + "description": "SG descr", + }, + "user_data": { + "type": "string", + "format": "base64", + "maxLength": 65535, + "description": "user data", + }, + "description": { + "type": ["string", "null"], + "minLength": 0, + "maxLength": 255, + "pattern": "regex_pattern", + }, + "tags": { + "type": "array", + "items": { + "type": "string", + "minLength": 1, + "maxLength": 60, + "pattern": "^[^,/]*$", + }, + "maxItems": 50, + }, + "trusted_image_certificates": { + "type": ["array", "null"], + "minItems": 1, + "maxItems": 50, + "uniqueItems": True, + "items": {"type": "string", "minLength": 1}, + }, + "host": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "pattern": "^[a-zA-Z0-9-._]*$", + }, + "hypervisor_hostname": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "pattern": "^[a-zA-Z0-9-._]*$", + }, + "hostname": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "pattern": "^[a-zA-Z0-9-._]*$", + }, + }, + "additionalProperties": False, + "required": ["name", "flavorRef", "networks"], + }, + "os:scheduler_hints": { + "type": "object", + "description": "scheduler hints description", + "properties": { + "group": {"type": "string", "format": "uuid"}, + "different_host": { + "oneOf": [ + {"type": "string", "format": "uuid"}, 
+ { + "type": "array", + "items": {"type": "string", "format": "uuid"}, + }, + ] + }, + "same_host": { + "type": ["string", "array"], + "items": {"type": "string", "format": "uuid"}, + "description": "A list of server UUIDs or a server UUID.", + }, + "query": {"type": ["string", "object"]}, + "target_cell": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "format": "name", + }, + "different_cell": { + "type": ["string", "array"], + "items": {"type": "string"}, + }, + "build_near_host_ip": { + "type": "string", + "oneOf": [{"format": "ipv4"}, {"format": "ipv6"}], + "description": "Schedule the server on a host in the network specified with", + }, + "cidr": {"type": "string", "pattern": "^/[0-9a-f.:]+$"}, + }, + "additionalProperties": True, + }, + "OS-SCH-HNT:scheduler_hints": { + "type": "object", + "properties": { + "group": {"type": "string", "format": "uuid"}, + "different_host": { + "oneOf": [ + {"type": "string", "format": "uuid"}, + { + "type": "array", + "items": {"type": "string", "format": "uuid"}, + }, + ], + "description": "A list of server UUIDs or a server UUID.\nSchedule the server on a different host from a set of servers.\nIt is available when `DifferentHostFilter` is available on cloud side.", + }, + "same_host": { + "type": ["string", "array"], + "items": {"type": "string", "format": "uuid"}, + }, + "query": {"type": ["string", "object"]}, + "target_cell": { + "type": "string", + "minLength": 1, + "maxLength": 255, + "format": "name", + }, + "different_cell": { + "type": ["string", "array"], + "items": {"type": "string"}, + }, + "build_near_host_ip": { + "type": "string", + "oneOf": [{"format": "ipv4"}, {"format": "ipv6"}], + }, + "cidr": {"type": "string", "pattern": "^/[0-9a-f.:]+$"}, + }, + "additionalProperties": True, + }, + }, + "additionalProperties": False, + "x-openstack": {"min-ver": "2.94"}, + "required": ["server"], +} + +EXPECTED_TLA_DATA = model.Struct( + reference=None, + fields={ + "server": model.StructField( + 
data_type=model.Reference(name="server", type=model.Struct), + description="A `server` object.", + is_required=True, + min_ver="2.94", + ), + "os:scheduler_hints": model.StructField( + data_type=model.Reference( + name="os:scheduler_hints", type=model.Struct + ), + description="scheduler hints description", + min_ver="2.94", + ), + "OS-SCH-HNT:scheduler_hints": model.StructField( + data_type=model.Reference( + name="OS-SCH-HNT:scheduler_hints", type=model.Struct + ), + min_ver="2.94", + ), + }, +) + +EXPECTED_DATA_TYPES = [ + model.OneOfType( + reference=model.Reference(name="imageRef", type=model.OneOfType), + kinds=[ + model.ConstraintString(format="uuid"), + model.ConstraintString(maxLength=0), + ], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="flavorRef", type=model.OneOfType), + kinds=[ + model.ConstraintString(minLength=1), + model.ConstraintInteger(), + ], + min_ver="2.94", + ), + model.Dictionary( + reference=model.Reference(name="metadata", type=model.Dictionary), + description="metadata description", + value_type=model.ConstraintString(maxLength=255), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="fixed_ip", type=model.OneOfType), + kinds=[ + model.ConstraintString(format="ipv4"), + model.ConstraintString(format="ipv6"), + ], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="port", type=model.OneOfType), + kinds=[ + model.ConstraintString(format="uuid"), + model.PrimitiveNull(), + ], + min_ver="2.94", + ), + model.Struct( + reference=model.Reference(name="networks", type=model.Struct), + fields={ + "fixed_ip": model.StructField( + data_type=model.Reference( + name="fixed_ip", type=model.OneOfType + ), + ), + "port": model.StructField( + data_type=model.Reference(name="port", type=model.OneOfType), + ), + "uuid": model.StructField( + data_type=model.ConstraintString(format="uuid"), + ), + "tag": model.StructField( + data_type=model.ConstraintString( + minLength=1, 
maxLength=60, pattern="^[^,/]*$" + ), + ), + }, + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="networks", type=model.Array), + item_type=model.Reference(name="networks", type=model.Struct), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="networks", type=model.OneOfType), + kinds=[ + model.Reference(name="networks", type=model.Array), + model.Reference(name="networks", type=model.Enum), + ], + min_ver="2.94", + ), + model.Enum( + reference=model.Reference(name="networks", type=model.Enum), + literals=["none", "auto"], + base_types=[model.ConstraintString], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="volume_size", type=model.OneOfType), + kinds=[ + model.ConstraintInteger(minimum=1, maximum=2147483647), + model.ConstraintString(pattern="^[0-9]+$"), + ], + min_ver="2.94", + ), + # model.OneOfType( + # reference=model.Reference( + # name="delete_on_termination", type=model.OneOfType + # ), + # kinds=[ + # model.Reference(name="delete_on_termination", type=model.Enum) + # #model.PrimitiveBoolean(), model.ConstraintString() + # ], + # ), + model.Enum( + reference=model.Reference( + name="delete_on_termination", type=model.Enum + ), + literals=[True, "True", False, "False"], + base_types=[model.ConstraintString, model.PrimitiveBoolean], + min_ver="2.94", + ), + model.Struct( + reference=model.Reference( + name="block_device_mapping", type=model.Struct + ), + fields={ + "virtual_name": model.StructField( + data_type=model.ConstraintString(maxLength=255), + ), + "volume_id": model.StructField( + data_type=model.ConstraintString(format="uuid"), + ), + "snapshot_id": model.StructField( + data_type=model.ConstraintString(format="uuid"), + ), + "volume_size": model.StructField( + data_type=model.Reference( + name="volume_size", type=model.OneOfType + ), + ), + "device_name": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=255, + pattern="^[a-zA-Z0-9._-r/]*$", + 
), + ), + "delete_on_termination": model.StructField( + data_type=model.Reference( + name="delete_on_termination", type=model.Enum + ), + ), + "no_device": model.StructField( + data_type=model.PrimitiveNull(), + ), + "connection_info": model.StructField( + data_type=model.ConstraintString(maxLength=16777215), + ), + }, + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="volume_size", type=model.OneOfType), + kinds=[ + model.ConstraintInteger(minimum=1, maximum=2147483647), + model.ConstraintString(pattern="^[0-9]+$"), + ], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="boot_index", type=model.OneOfType), + kinds=[ + model.ConstraintInteger(), + model.ConstraintString(pattern="^-?[0-9]+$"), + model.PrimitiveNull(), + ], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="volume_type", type=model.OneOfType), + kinds=[ + model.ConstraintString(minLength=0, maxLength=255), + model.PrimitiveNull(), + ], + min_ver="2.94", + ), + model.Struct( + reference=model.Reference( + name="block_device_mapping_v2", type=model.Struct + ), + fields={ + "virtual_name": model.StructField( + data_type=model.ConstraintString(maxLength=255), + ), + "volume_id": model.StructField( + data_type=model.ConstraintString( + format="uuid", + ), + ), + "snapshot_id": model.StructField( + data_type=model.ConstraintString( + format="uuid", + ), + ), + "volume_size": model.StructField( + data_type=model.Reference( + name="volume_size", type=model.OneOfType + ), + ), + "device_name": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=255, + pattern="^[a-zA-Z0-9._-r/]*$", + ), + ), + "delete_on_termination": model.StructField( + data_type=model.Reference( + name="delete_on_termination", type=model.Enum + ), + ), + "no_device": model.StructField( + data_type=model.PrimitiveNull(), + ), + "connection_info": model.StructField( + data_type=model.ConstraintString( + maxLength=16777215, + ), + ), + 
"source_type": model.StructField( + data_type=model.Reference(name="source_type", type=model.Enum) + ), + "uuid": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=255, + pattern="^[a-zA-Z0-9._-]*$", + ), + ), + "image_id": model.StructField( + data_type=model.ConstraintString( + format="uuid", + ), + ), + "destination_type": model.StructField( + data_type=model.Reference( + name="destination_type", type=model.Enum + ) + ), + "guest_format": model.StructField( + data_type=model.ConstraintString( + maxLength=255, + ), + ), + "device_type": model.StructField( + data_type=model.ConstraintString( + maxLength=255, + ), + ), + "disk_bus": model.StructField( + data_type=model.ConstraintString( + maxLength=255, + ), + ), + "boot_index": model.StructField( + data_type=model.Reference( + name="boot_index", type=model.OneOfType + ), + ), + "tag": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=60, + pattern="^[^,/]*$", + ), + ), + "volume_type": model.StructField( + data_type=model.Reference( + name="volume_type", type=model.OneOfType + ), + ), + }, + min_ver="2.94", + ), + model.Array( + reference=model.Reference( + name="block_device_mapping", type=model.Array + ), + item_type=model.Reference( + name="block_device_mapping", type=model.Struct + ), + min_ver="2.94", + ), + model.Array( + reference=model.Reference( + name="block_device_mapping_v2", type=model.Array + ), + item_type=model.Reference( + name="block_device_mapping_v2", type=model.Struct + ), + min_ver="2.94", + ), + model.Enum( + reference=model.Reference(name="config_drive", type=model.Enum), + base_types=[ + model.PrimitiveBoolean, + model.ConstraintString, + ], + literals=set(["No", "no", False]), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="min_count", type=model.OneOfType), + kinds=[ + model.ConstraintInteger( + minimum=1, + ), + model.ConstraintString( + minLength=1, + pattern="^[0-9]*$", + ), + ], + 
min_ver="2.94", + ), + model.Struct( + reference=model.Reference(name="security_groups", type=model.Struct), + fields={ + "name": model.StructField( + data_type=model.ConstraintString( + format="name", minLength=1, maxLength=255 + ), + description="A target cell name. Schedule the server in a host in the cell specified.", + ) + }, + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="security_groups", type=model.Array), + item_type=model.Reference(name="security_groups", type=model.Struct), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="description", type=model.OneOfType), + kinds=[ + model.ConstraintString( + minLength=0, + maxLength=255, + pattern="regex_pattern", + ), + model.PrimitiveNull(), + ], + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="tags", type=model.Array), + item_type=model.ConstraintString( + format=None, minLength=1, maxLength=60, pattern="^[^,/]*$" + ), + min_ver="2.94", + ), + model.Array( + reference=model.Reference( + name="trusted_image_certificates", type=model.Array + ), + item_type=model.ConstraintString(format=None, minLength=1), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference( + name="trusted_image_certificates", type=model.OneOfType + ), + kinds=[ + model.Reference( + name="trusted_image_certificates", type=model.Array + ), + model.PrimitiveNull(), + ], + min_ver="2.94", + ), + model.Struct( + reference=model.Reference(name="server", type=model.Struct), + description="A `server` object.", + fields={ + "name": model.StructField( + data_type=model.ConstraintString( + format="name", minLength=1, maxLength=255 + ), + description="dummy description", + is_required=True, + min_ver="2.94", + ), + "imageRef": model.StructField( + data_type=model.Reference( + name="imageRef", type=model.OneOfType + ), + min_ver="2.94", + ), + "flavorRef": model.StructField( + data_type=model.Reference( + name="flavorRef", type=model.OneOfType + ), + is_required=True, + 
min_ver="2.94", + ), + "adminPass": model.StructField( + data_type=model.ConstraintString(format=None), min_ver="2.94" + ), + "metadata": model.StructField( + data_type=model.Reference( + name="metadata", type=model.Dictionary + ), + description="metadata description", + min_ver="2.94", + ), + "networks": model.StructField( + data_type=model.Reference( + name="networks", type=model.OneOfType + ), + description="Networks description", + is_required=True, + min_ver="2.94", + ), + "OS-DCF:diskConfig": model.StructField( + data_type=model.Reference( + name="OS-DCF:diskConfig", type=model.Enum + ), + description="DiskConfig description", + min_ver="2.94", + ), + "accessIPv4": model.StructField( + data_type=model.ConstraintString(format="ipv4"), + description="IPv4 address", + min_ver="2.94", + ), + "availability_zone": model.StructField( + data_type=model.ConstraintString( + format="name", minLength=1, maxLength=255 + ), + description="A target cell name.", + min_ver="2.94", + ), + "block_device_mapping": model.StructField( + data_type=model.Reference( + name="block_device_mapping", type=model.Array + ), + min_ver="2.94", + ), + "block_device_mapping_v2": model.StructField( + data_type=model.Reference( + name="block_device_mapping_v2", type=model.Array + ), + description="descr", + min_ver="2.94", + ), + "config_drive": model.StructField( + data_type=model.Reference( + name="config_drive", type=model.Enum + ), + min_ver="2.94", + ), + "min_count": model.StructField( + data_type=model.Reference( + name="min_count", type=model.OneOfType + ), + min_ver="2.94", + ), + "security_groups": model.StructField( + data_type=model.Reference( + name="security_groups", type=model.Array + ), + description="SG descr", + min_ver="2.94", + ), + "user_data": model.StructField( + data_type=model.ConstraintString( + format="base64", + maxLength=65535, + ), + description="user data", + min_ver="2.94", + ), + "description": model.StructField( + data_type=model.Reference( + name="description", 
type=model.OneOfType + ), + min_ver="2.94", + ), + "tags": model.StructField( + data_type=model.Reference(name="tags", type=model.Array), + min_ver="2.94", + ), + "trusted_image_certificates": model.StructField( + data_type=model.Reference( + name="trusted_image_certificates", type=model.OneOfType + ), + min_ver="2.94", + ), + "host": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=255, + pattern="^[a-zA-Z0-9-._]*$", + ), + min_ver="2.94", + ), + "hypervisor_hostname": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=255, + pattern="^[a-zA-Z0-9-._]*$", + ), + min_ver="2.94", + ), + "hostname": model.StructField( + data_type=model.ConstraintString( + minLength=1, + maxLength=255, + pattern="^[a-zA-Z0-9-._]*$", + ), + min_ver="2.94", + ), + }, + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="different_host", type=model.Array), + item_type=model.ConstraintString(format="uuid"), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="different_host", type=model.OneOfType), + kinds=[ + model.ConstraintString(format="uuid"), + model.Reference(name="different_host", type=model.Array), + ], + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="same_host", type=model.Array), + item_type=model.ConstraintString(format="uuid"), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="same_host", type=model.OneOfType), + kinds=[ + model.ConstraintString(format=None), + model.Reference(name="same_host", type=model.Array), + ], + min_ver="2.94", + ), + model.Dictionary( + reference=model.Reference(name="query", type=model.Dictionary), + value_type=model.PrimitiveAny(), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="query", type=model.OneOfType), + kinds=[ + model.ConstraintString(format=None), + model.Reference(name="query", type=model.Dictionary), + ], + min_ver="2.94", + ), + model.Array( + 
reference=model.Reference(name="different_cell", type=model.Array), + item_type=model.ConstraintString(format=None), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="different_cell", type=model.OneOfType), + kinds=[ + model.ConstraintString(format=None), + model.Reference(name="different_cell", type=model.Array), + ], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference( + name="build_near_host_ip", type=model.OneOfType + ), + kinds=[ + model.ConstraintString(format="ipv4"), + model.ConstraintString(format="ipv6"), + ], + min_ver="2.94", + ), + model.Struct( + reference=model.Reference( + name="os:scheduler_hints", type=model.Struct + ), + description="scheduler hints description", + fields={ + "group": model.StructField( + data_type=model.ConstraintString(format="uuid"), min_ver="2.94" + ), + "different_host": model.StructField( + data_type=model.Reference( + name="different_host", type=model.OneOfType + ), + min_ver="2.94", + ), + "same_host": model.StructField( + data_type=model.Reference( + name="same_host", type=model.OneOfType + ), + description="A list of server UUIDs or a server UUID.", + min_ver="2.94", + ), + "query": model.StructField( + data_type=model.Reference(name="query", type=model.OneOfType), + min_ver="2.94", + ), + "target_cell": model.StructField( + data_type=model.ConstraintString( + format="name", minLength=1, maxLength=255 + ), + min_ver="2.94", + ), + "different_cell": model.StructField( + data_type=model.Reference( + name="different_cell", type=model.OneOfType + ), + min_ver="2.94", + ), + "build_near_host_ip": model.StructField( + data_type=model.Reference( + name="build_near_host_ip", type=model.OneOfType + ), + description="Schedule the server on a host in the network specified with", + min_ver="2.94", + ), + "cidr": model.StructField( + data_type=model.ConstraintString( + pattern="^/[0-9a-f.:]+$", + ), + min_ver="2.94", + ), + }, + additional_fields=model.PrimitiveAny(), + min_ver="2.94", + 
), + model.Array( + reference=model.Reference(name="different_host", type=model.Array), + item_type=model.ConstraintString(format="uuid"), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="different_host", type=model.OneOfType), + kinds=[ + model.ConstraintString(format="uuid"), + model.Reference(name="different_host", type=model.Array), + ], + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="same_host", type=model.Array), + item_type=model.ConstraintString(format="uuid"), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="same_host", type=model.OneOfType), + kinds=[ + model.ConstraintString(format=None), + model.Reference(name="same_host", type=model.Array), + ], + min_ver="2.94", + ), + model.Array( + reference=model.Reference(name="different_cell", type=model.Array), + item_type=model.ConstraintString(format=None), + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference(name="different_cell", type=model.OneOfType), + kinds=[ + model.ConstraintString(format=None), + model.Reference(name="different_cell", type=model.Array), + ], + min_ver="2.94", + ), + model.Enum( + reference=model.Reference(name="source_type", type=model.Enum), + literals=set(["volume", "image", "snapshot", "blank"]), + base_types=[ + model.ConstraintString, + ], + min_ver="2.94", + ), + model.Enum( + reference=model.Reference(name="destination_type", type=model.Enum), + literals=set(["volume", "local"]), + base_types=[ + model.ConstraintString, + ], + min_ver="2.94", + ), + model.Enum( + reference=model.Reference(name="OS-DCF:diskConfig", type=model.Enum), + literals=set(["AUTO", "MANUAL"]), + base_types=[ + model.ConstraintString, + ], + min_ver="2.94", + ), + model.OneOfType( + reference=model.Reference( + name="build_near_host_ip", type=model.OneOfType + ), + kinds=[ + model.ConstraintString(format="ipv4"), + model.ConstraintString(format="ipv6"), + ], + min_ver="2.94", + ), + model.Struct( + 
reference=model.Reference( + name="OS-SCH-HNT:scheduler_hints", type=model.Struct + ), + fields={ + "group": model.StructField( + data_type=model.ConstraintString(format="uuid"), min_ver="2.94" + ), + "different_host": model.StructField( + data_type=model.Reference( + name="different_host", type=model.OneOfType + ), + description="A list of server UUIDs or a server UUID.\nSchedule the server on a different host from a set of servers.\nIt is available when `DifferentHostFilter` is available on cloud side.", + min_ver="2.94", + ), + "same_host": model.StructField( + data_type=model.Reference( + name="same_host", type=model.OneOfType + ), + min_ver="2.94", + ), + "query": model.StructField( + data_type=model.Reference(name="query", type=model.OneOfType), + min_ver="2.94", + ), + "target_cell": model.StructField( + data_type=model.ConstraintString( + format="name", minLength=1, maxLength=255 + ), + min_ver="2.94", + ), + "different_cell": model.StructField( + data_type=model.Reference( + name="different_cell", type=model.OneOfType + ), + min_ver="2.94", + ), + "build_near_host_ip": model.StructField( + data_type=model.Reference( + name="build_near_host_ip", type=model.OneOfType + ), + min_ver="2.94", + ), + "cidr": model.StructField( + data_type=model.ConstraintString( + pattern="^/[0-9a-f.:]+$", + ), + min_ver="2.94", + ), + }, + additional_fields=model.PrimitiveAny(), + min_ver="2.94", + ), + EXPECTED_TLA_DATA, +] + + +class TestModel(TestCase): + def setUp(self): + super().setUp() + logging.basicConfig(level=logging.DEBUG) + + def test_model_parse(self): + parser = model.JsonSchemaParser() + (res, types) = parser.parse(SAMPLE_SERVER_SCHEMA) + # TODO: replace with a dedicated checks of parsed schemas + # self.assertEqual(res, EXPECTED_TLA_DATA) + # for expected in EXPECTED_DATA_TYPES: + # if expected not in types: + # for present in types: + # if present.reference and expected.reference: + # if ( + # present.reference.name == expected.reference.name + # and 
present.reference.type + # == expected.reference.type + # ): + # self.assertEqual(expected, present) + # break + + def test_parse_string_parameter(self): + schema = { + "in": "query", + "name": "tags", + "schema": { + "type": "string", + "format": "regex", + }, + "x-openstack": {"min-ver": "2.26"}, + } + parser = model.OpenAPISchemaParser() + res = parser.parse_parameter(schema) + dt = res.data_type + self.assertIsInstance(res, model.RequestParameter) + self.assertIsInstance(dt, model.ConstraintString) + self.assertEqual("regex", dt.format) + self.assertEqual("query", res.location) + self.assertEqual("tags", res.name) + + def test_parse_string_array_parameter(self): + schema = { + "in": "query", + "name": "tags", + "schema": {"type": "array", "items": {"type": "string"}}, + "style": "form", + "explode": False, + } + parser = model.OpenAPISchemaParser() + res = parser.parse_parameter(schema) + dt = res.data_type + self.assertIsInstance(res, model.RequestParameter) + self.assertIsInstance(dt, model.CommaSeparatedList) + self.assertIsInstance(dt.item_type, model.ConstraintString) + + def test_parse_limit_multitype_parameter(self): + schema = { + "in": "query", + "name": "limit", + "schema": { + "type": ["strng", "integer"], + "format": "^[0-9]*$", + "minimum": 0, + }, + } + parser = model.OpenAPISchemaParser() + res = parser.parse_parameter(schema) + dt = res.data_type + self.assertIsInstance(res, model.RequestParameter) + self.assertIsInstance(dt, model.ConstraintInteger) + self.assertEqual(dt.minimum, 0) + + # def test_microversion(self): + # data: dict | None = None + # with open( + # "codegenerator/tests/unit/model_microversion.yaml", "r" + # ) as fp: + # data = jsonref.replace_refs(yaml.safe_load(fp)) + + # if not data: + # raise RuntimeError + # parser = model.OpenAPISchemaParser() + # schema1, all1 = parser.parse( + # data["components"]["schemas"]["Flavors_Create_20"] + # ) + # schema2, all2 = parser.parse( + # data["components"]["schemas"]["Flavors_Create_21"] 
+ # ) + # schema3, all3 = parser.parse( + # data["components"]["schemas"]["Flavors_Create_255"] + # ) + + # all_all = parser.merge_models(all2, all3) + + # # print(schema3) + # print(all_all) + def test_parse_array_of_array_of_strings(self): + schema = { + "type": ["array", "null"], + "description": "aoaos", + "items": { + "type": "array", + "description": "aos", + "items": {"type": "string"}, + "minItems": 1, + "uniqueItems": True, + }, + "uniqueItems": True, + } + parser = model.OpenAPISchemaParser() + (res, all_models) = parser.parse(schema) + self.assertIsInstance(res, model.OneOfType) + if res: + k = res.kinds[0] + self.assertIsInstance(k, model.Array) + self.assertIsInstance(k.item_type, model.Array) + + def test_server_unshelve(self): + schema = { + "type": "object", + "properties": { + "unshelve": { + "oneOf": [ + { + "type": ["object"], + "properties": { + "availability_zone": { + "oneOf": [ + {"type": ["null"]}, + {"type": "string"}, + ] + }, + "host": {"type": "string"}, + }, + "additionalProperties": False, + }, + {"type": ["null"]}, + ] + } + }, + "additionalProperties": False, + "x-openstack": {"min-ver": "2.91", "action-name": "unshelve"}, + "required": ["unshelve"], + } + parser = model.OpenAPISchemaParser() + (res, all_models) = parser.parse(schema) + self.assertEqual(4, len(all_models)) diff --git a/codegenerator/tests/unit/test_rust_cli.py b/codegenerator/tests/unit/test_rust_cli.py new file mode 100644 index 0000000..c24213d --- /dev/null +++ b/codegenerator/tests/unit/test_rust_cli.py @@ -0,0 +1,88 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the +# License for the specific language governing permissions and limitations +# under the License. +# +import logging +from unittest import TestCase + +from jinja2 import Environment +from jinja2 import FileSystemLoader +from jinja2 import select_autoescape +from jinja2 import StrictUndefined + +from codegenerator import base +from codegenerator import model +from codegenerator import rust_cli + + +class TestRustCliResponseManager(TestCase): + def setUp(self): + super().setUp() + logging.basicConfig(level=logging.DEBUG) + + def test_parse_array_of_array_of_strings(self): + expected_content = """ +/// foo response representation +#[derive(Deserialize, Serialize)] +#[derive(Clone, StructTable)] +struct ResponseData { + /// aoaos + /// + #[serde()] + #[structable(pretty)] + foo: Option<Value>, +} + """ + schema = { + "type": "object", + "properties": { + "foo": { + "type": ["array", "null"], + "description": "aoaos", + "items": { + "type": "array", + "description": "aos", + "items": {"type": "string"}, + "minItems": 1, + "uniqueItems": True, + }, + "uniqueItems": True, + } + }, + } + parser = model.OpenAPISchemaParser() + (_, all_models) = parser.parse(schema) + + cli_rm = rust_cli.ResponseTypeManager() + cli_rm.set_models(all_models) + + env = Environment( + loader=FileSystemLoader("codegenerator/templates"), + autoescape=select_autoescape(), + undefined=StrictUndefined, + ) + env.filters["wrap_markdown"] = base.wrap_markdown + template = env.get_template("rust_cli/response_struct.j2") + + content = template.render( + target_class_name="foo", + response_type_manager=cli_rm, + method=None, + params={}, + is_json_patch=False, + sdk_service_name="srv", + resource_name="res", + operation_type="dummy", + ) + self.assertEqual( + "".join([x.rstrip() for x in expected_content.split()]), + "".join([x.rstrip() for x in content.split()]), + ) diff --git a/codegenerator/tests/unit/test_rust_sdk.py b/codegenerator/tests/unit/test_rust_sdk.py new file mode 100644 index 
0000000..44614fd --- /dev/null +++ b/codegenerator/tests/unit/test_rust_sdk.py @@ -0,0 +1,210 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. +# +import logging +from unittest import TestCase + +from jinja2 import Environment +from jinja2 import FileSystemLoader +from jinja2 import select_autoescape +from jinja2 import StrictUndefined + +from codegenerator import base +from codegenerator import model +from codegenerator import rust_sdk +from codegenerator.common import rust as common_rust +from codegenerator.tests.unit import test_model + + +class TestRustSdkModel(TestCase): + models = [ + model.Struct( + reference=None, + fields={ + "a": model.StructField( + data_type=model.PrimitiveString(), + description="a descr", + is_required=True, + ), + "b": model.StructField( + data_type=model.ConstraintString( + format="foo", minLength=1, maxLength=2, pattern="3" + ) + ), + "c": model.StructField( + data_type=model.ConstraintNumber(format="double") + ), + "d": model.StructField(data_type=model.ConstraintInteger()), + "e": model.StructField(data_type=model.PrimitiveBoolean()), + "f": model.StructField( + data_type=model.Reference(name="f", type=model.OneOfType), + is_required=True, + ), + "g": model.StructField( + data_type=model.Dictionary( + value_type=model.PrimitiveString(), + ) + ), + }, + ), + model.OneOfType( + reference=model.Reference(name="f", type=model.OneOfType), + kinds=[ + model.PrimitiveString(), + model.Reference(name="f_array", type=model.Array), + ], + ), + 
model.Array( + reference=model.Reference(name="f_array", type=model.Array), + item_type=model.PrimitiveString(), + ), + ] + + # def test_string_type(self): + # # generator = rust_sdk.Generator() + # res = rust_sdk.get_type(model.PrimitiveString()) + # self.assertIsInstance(res, rust_sdk.String) + # self.assertEqual(res.type_hint, "Cow<'a, str>") + # self.assertEqual(res.imports, set(["std::borrow::Cow"])) + + def test_model_string_vec_strings(self): + """Ensure oneOf from vec<string> and string is mapped to vec<string>""" + logging.basicConfig(level=logging.DEBUG) + type_manager = rust_sdk.TypeManager() + type_manager.set_models(self.models) + mod = type_manager.convert_model( + model.Reference(name="f", type=model.OneOfType) + ) + self.assertIsInstance(mod, common_rust.Array) + self.assertIsInstance(mod.item_type, rust_sdk.String) + # print(type_manager.refs) + + def test_model_struct(self): + logging.basicConfig(level=logging.DEBUG) + type_manager = rust_sdk.TypeManager() + type_manager.set_models(self.models) + mod = type_manager.convert_model(self.models[0]) + self.assertIsInstance(mod, rust_sdk.Struct) + self.assertFalse(mod.fields["a"].is_optional) + field_a = mod.fields["a"] + self.assertEqual(field_a.is_optional, False) + self.assertEqual(field_a.description, "a descr") + self.assertEqual(field_a.type_hint, "Cow<'a, str>") + field_b = mod.fields["b"] + self.assertEqual(field_b.is_optional, True) + self.assertEqual(field_b.type_hint, "Option<Cow<'a, str>>") + field_c = mod.fields["c"] + self.assertEqual(field_c.is_optional, True) + self.assertEqual(field_c.type_hint, "Option<f64>") + field_d = mod.fields["d"] + self.assertEqual(field_d.is_optional, True) + self.assertEqual(field_d.type_hint, "Option<i32>") + field_d = mod.fields["d"] + field_e = mod.fields["e"] + self.assertEqual(field_e.is_optional, True) + self.assertEqual(field_e.type_hint, "Option<bool>") + field_f = mod.fields["f"] + self.assertEqual(field_f.is_optional, False) + 
self.assertEqual(field_f.type_hint, "Vec<Cow<'a, str>>") + field = mod.fields["g"] + self.assertEqual(field.is_optional, True) + self.assertEqual( + field.type_hint, "Option<BTreeMap<Cow<'a, str>, Cow<'a, str>>>" + ) + self.assertEqual(set(["'a"]), mod.lifetimes) + + def test_get_submodels(self): + logging.basicConfig(level=logging.DEBUG) + type_manager = rust_sdk.TypeManager() + type_manager.set_models(test_model.EXPECTED_DATA_TYPES) + # res = type_manager.get_subtypes() + self.assertEqual( + set( + [ + "std::collections::BTreeMap", + "std::borrow::Cow", + "serde::Deserialize", + "serde::Serialize", + "serde_json::Value", + ] + ), + type_manager.get_imports(), + ) + + def test_render_submodels(self): + # expected_subtypes_render = "" + logging.basicConfig(level=logging.DEBUG) + type_manager = rust_sdk.TypeManager() + type_manager.set_models(test_model.EXPECTED_DATA_TYPES) + env = Environment( + loader=FileSystemLoader("codegenerator/templates"), + autoescape=select_autoescape(), + undefined=StrictUndefined, + ) + env.filters["wrap_markdown"] = base.wrap_markdown + + template = env.get_template("rust_sdk/subtypes.j2") + content = template.render(type_manager=type_manager) + + # TODO: implement proper rendering with individual model types + self.assertIsNotNone(content) + + # self.assertEqual( + # "".join([x.rstrip() for x in expected_subtypes_render.split()]), + # "".join([x.rstrip() for x in content.split()]), + # ) + + def test_render_root_type(self): + expected_root_render = """ +#[derive(Builder, Debug, Clone)] +#[builder(setter(strip_option))] +pub struct Request<'a> { + + /// A `server` object. 
+ /// + #[builder(setter(into))] + pub(crate) server: Server<'a>, + + /// scheduler hints description + /// + #[builder(default, setter(into))] + pub(crate) os_scheduler_hints: Option<OsSchedulerHints<'a>>, + + #[builder(default, setter(into))] + pub(crate) os_sch_hnt_scheduler_hints: Option<OsSchHntSchedulerHints<'a>>, + + #[builder(setter(name = "_headers"), default, private)] + _headers: Option<HeaderMap>, +} + """ + logging.basicConfig(level=logging.DEBUG) + type_manager = rust_sdk.TypeManager() + type_manager.set_models(test_model.EXPECTED_DATA_TYPES) + env = Environment( + loader=FileSystemLoader("codegenerator/templates"), + autoescape=select_autoescape(), + undefined=StrictUndefined, + ) + env.filters["wrap_markdown"] = base.wrap_markdown + + template = env.get_template("rust_sdk/request_struct.j2") + content = template.render( + type_manager=type_manager, + method=None, + params={}, + is_json_patch=False, + ) + + self.assertEqual( + "".join([x.rstrip() for x in expected_root_render.split()]), + "".join([x.rstrip() for x in content.split()]), + ) diff --git a/codegenerator/types.py b/codegenerator/types.py new file mode 100644 index 0000000..52d0f55 --- /dev/null +++ b/codegenerator/types.py @@ -0,0 +1,84 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT +# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the +# License for the specific language governing permissions and limitations +# under the License. 
+# + +from typing import Literal + +from pydantic import BaseModel, ConfigDict, Field + + +OPERATION_TYPE = Literal[ + "list", + "show", + "get", + "create", + "delete", + "set", + "action", + "download", + "upload", + "json", + "find", +] + +SUPPORTED_TARGETS = Literal["rust-sdk", "rust-cli"] + + +class OperationTargetParams(BaseModel): + model_config = ConfigDict(extra="forbid") + # deprecated + alternative_module_path: str | None = None + module_path: str | None = None + # deprecated + alternative_module_name: str | None = None + module_name: str | None = None + sdk_mod_path: str | None = None + sdk_mod_name: str | None = None + cli_mod_path: str | None = None + operation_type: OPERATION_TYPE | None = None + # currently used for actions to find proper response body + operation_name: str | None = None + service_type: str | None = None + api_version: str | None = None + request_key: str | None = None + response_key: str | None = None + response_list_item_key: str | None = None + #: Flag indicating that `find` operation is implemented by the corresponding SDK + find_implemented_by_sdk: bool | None = None + #: Name or the resource `name` field + name_field: str | None = None + #: Flag whether `name` query parameter to the `list` method is supported. + #: Used by SDK to implement `find` method. 
+ name_filter_supported: bool | None = None + #: List module for the find + list_mod: str | None = None + + +class OperationModel(BaseModel): + model_config = ConfigDict(extra="forbid") + operation_id: str + spec_file: str | None = Field(default=None) + operation_type: OPERATION_TYPE | None = None + targets: dict[SUPPORTED_TARGETS, OperationTargetParams] + + +class ResourceModel(BaseModel): + model_config = ConfigDict(extra="forbid") + spec_file: str + api_version: str | None = None + operations: dict[str, OperationModel] + extensions: dict[str, dict] = Field(default={}) + + +class Metadata(BaseModel): + model_config = ConfigDict(extra="forbid") + resources: dict[str, ResourceModel] diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 0000000..a83a2ff --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,4 @@ +docutils>=0.11 # OSI-Approved Open Source, Public Domain +openstackdocstheme>=2.2.1 # Apache-2.0 +reno>=3.1.0 # Apache-2.0 +sphinx>=2.0.0,!=2.1.0 # BSD diff --git a/doc/source/ansible.rst b/doc/source/ansible.rst new file mode 100644 index 0000000..e06d742 --- /dev/null +++ b/doc/source/ansible.rst @@ -0,0 +1,10 @@ +Ansible Modules +=============== + +Currently a POC generation of Ansible modules is covered +by the project. Since AnsibleModules depends heavily on +the OpenStackSDK implementation work needs to be done to +improve generation by comparing SDK implementation with +the OpenAPI spec. + +TODO diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 0000000..1ca6f80 --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,97 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import sys +import warnings + +sys.path.insert(0, os.path.abspath("../..")) +sys.path.insert(0, os.path.abspath(".")) + +# -- General configuration ---------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + "sphinx.ext.autodoc", + "openstackdocstheme", +] + +# openstackdocstheme options +openstackdocs_repo_name = "openstack/codegenerator" +# openstackdocs_pdf_link = True +# openstackdocs_use_storyboard = False +html_theme = "openstackdocs" + +# autodoc generation is a bit aggressive and a nuisance when doing heavy +# text edit cycles. +# execute "export SPHINX_DEBUG=1" in your terminal to disable + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix of source filenames. +source_suffix = ".rst" + +# The master toctree document. +master_doc = "index" + +# General information about the project. +copyright = "2023, Various members of the OpenStack Foundation" + +# If true, '()' will be appended to :func: etc. cross-reference text. +add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +add_module_names = True + +# The name of the Pygments (syntax highlighting) style to use. 
+pygments_style = "native" + +autodoc_member_order = "bysource" + +# Include both the class and __init__ docstrings when describing the class +autoclass_content = "both" + +# Don't document type hints as they're too noisy +autodoc_typehints = "none" + +# Locations to exclude when looking for source files. +exclude_patterns: list = [] + +# -- Options for HTML output ---------------------------------------------- + +# Don't let openstackdocstheme insert TOCs automatically. +theme_include_auto_toc = False + +# -- Options for LaTeX output --------------------------------------------- + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto/manual]). +latex_documents = [ + ( + "index", + "doc-openstacksdk.tex", + "OpenStackCodegenerator Documentation", + "OpenStack Foundation", + "manual", + ), +] + +# Allow deeper levels of nesting for \begin...\end stanzas +latex_elements = {"maxlistdepth": 10} + +# Disable usage of xindy https://bugzilla.redhat.com/show_bug.cgi?id=1643664 +latex_use_xindy = False diff --git a/doc/source/index.rst b/doc/source/index.rst new file mode 100644 index 0000000..f15fbe7 --- /dev/null +++ b/doc/source/index.rst @@ -0,0 +1,21 @@ +======================= +OpenStack CodeGenerator +======================= + +Primary goal of the project is to simplify maintainers life by generating +complete or at least parts of the code. + +It is currently in a phase of finding final structure there currently is not +an official OpenStack project (to allow subprojects split or merge). Once the +structure stabilizes a request to include this as official project would be +made. + +.. 
toctree:: + :maxdepth: 1 + + openapi + metadata + rust_sdk + rust_cli + ansible + osc diff --git a/doc/source/metadata.rst b/doc/source/metadata.rst new file mode 100644 index 0000000..b7c898f --- /dev/null +++ b/doc/source/metadata.rst @@ -0,0 +1,15 @@ +Metadata +======== + +Generating the code requires providing certain amount of information like: + +- OpenAPI spec path +- operation from the spec +- target for generation +- output location +- additional information in case of dependencies between generation targets. + +In order to simplify that a metadata file can provide all +this information. + +TODO diff --git a/doc/source/openapi.rst b/doc/source/openapi.rst new file mode 100644 index 0000000..46c5b77 --- /dev/null +++ b/doc/source/openapi.rst @@ -0,0 +1,186 @@ +OpenApi Schema +============== + +CodeGenerator is able to generate OpenAPI specs for certain services by +inspecting their code. This requires service package being installed in the +environment where the generator is running. It then tries to initialize service +application and for supported runtimes scans for the exposed operations. At the +moment following services are covered: + +- Nova + +- Neutron + +- Cinder + +- Glance + +- Keystone + +- Octavia + + +Generator can be invoked after installing it as a regular python project with +dependencies + +.. code-block:: console + + openstack-codegenerator --target openapi-spec --work-dir wrk --service-type compute + +The generator is having possibility to additionally parse rendered service +API-REF HTML documentation and supplement descriptions in the generated +OpenApi spec by trying to find corresponding information in the html. + +.. 
code-block:: console + + openstack-codegenerator --target openapi-spec --work-dir wrk --service-type compute --api-ref-src <PATH_TO_RENDERED_DOC>.html + + +Another project for rendering generated OpenAPI specs in the style +similar (but not the same way) to currently used os-api-ref: +`https://github.com/gtema/openstack-openapi`. It implements a +Sphinx extension that reads spec file and converts it to internal +sphinx directives to get a proper structure in the rendered HTML +and styles it using `BootstrapV5` library. Sample rendering can be +seen under `https://gtema.github.io/openstack-openapi/` + + +Highlevel description (for contributor) +--------------------------------------- + +Base generator +:class:`~codegenerator.openapi.base.OpenStackServerSourceGenerator` is +supporting WSGI + Routes based application out of box. For such applications +it tries to get the main router from wich all exposed routes are being +analysed. During routes processing generator is searching for supported +decorators and frameworks in order to extract as most information about the +operation as possible: + +- url +- method +- path parameters +- query parameters +- expected body jsonschema +- response jsonschema +- expected response and error codes +- operation description (docstrings) + +Generator for every covered OpenStack service is inherits from the Base +generator (i.e. :class:`~codegenerator.openapi.nova.NovaGenerator`. It is +expected that `init` method will perform service setup activities (i.e. +database creation or config file preparation whenever required) and sets the +main application router. `generate` method of the class is then being invoked +and it reads current spec file (if present to update it) and loops over all +exposed routes. For each route a dedicated method `_process_route` is +invoked, which in turn invoke multiple additional methods for parameters or +body schemas processing. 
+ +After processing when api-ref html is available a dedicated method +:class:`~codegenerator.openapi.utils.merge_api_ref_doc` can be called to add +available descriptions (operation, parameters). + +.. note:: + Since all services use `oslo_config` and `oslo_policy` libraries which rely + on global state they race with each other. In order to avoid this processing + rely on multiprocessing to isolate services. + + +Nova +---- + +Source code of Nova currently provides full information about exposed routes +and query/path parameters, as well as jsonschema of request body. Sadly it does +not contain jsonschemas of the responses. CodeGenerator at the moment covers +those missing schemas directly in the code and injects them into the schema via +:class:`~codegenerator.openapi.nova.NovaGenerator:_get_schema_ref` + +After stabilization it is expected to move implemented schemas into the Nova +source code base. + + +Cinder +------- + +Cinder is very similar to Nova so everything mentioned above is applicable +here as well. + +For Cinder at the moment all operations are duplicated under +`v3/${project_id}/...` and `v3/...`. For the sake of standardization +project_id urls are excluded from the produced spec file. + + +Glance +------ + +Glance is also using `routes` for exposing application. However in difference +to Nova and Cinder it does not describe request parameters of bodies in an +expected way. Current implementation of the Glance generator therefore is +looking at the request serializer and deserializer attached to the operation +controllers. When this information is present and contain usable jsonschema +it is being used. In other cases similar approach to Nova with hardcoding +response information is being used. But since Glance code base contain +certain useful jsonschemas (not connected in the routes) generator gets those +schemas directly from the code (where the mapping is known).
+ + +Keystone +-------- + +This service is using `Flask` framework which gives similar capabilities to +the `routes`. However here there are no body information at all (neither +Request nor Response). Also here there are certain jsonschemas found directly +in the Keystone code base and connected for the schema generation. + + +Neutron +------- + +This is where things are getting more challenging. + +Neutron requires having DB provisioned and an in-memory DB seems not to be +possible due to techniques for the DB communication. In addition to that config +file enabling desired extensions is expected. All these activities are covered +in :class:`~codegenerator.openapi.neutron.NeutronGenerator:setup_neutron`. +According to the current information it is not possible to have all possible +Neutron extensions and plugins enabled at the same time. This is solved by +generator spinning multiple subprocesses that bootstrap Neutron with different +configuration and then merge results. This is handled by spinning up Neutron +few times with independent configurations and merging resulting spec. + +Additional challenge in Neutron is that it does not use `routes` to expose +operations directly, but is having a mix of `routes` based operations for +extensions and `pecan` app for the base functionality. Since the `pecan` +framework is based on a purely dynamic routing there is no possibility to +extract information about exposed routes by doing code inspection. Luckily only +base operations (router/net/subnet) are implemented this way. Therefore +generator registers known `pecan` operations into the extensions router and +normal generator flow is being invoked. + +Next challenge is that for Neutron there is no description of bodies at all, +but certain controllers are having `API_DEFINITION` attached. While this is not +a jsonschema at all it can be used to create one where possible.
Sadly there is +still sometimes no possibility to properly estimate whether certain operation is +exposed and functioning or it is exposed but fails permanently due to the fact, +that `API_DEFINITION` extrapolation fails for this operation. +:class:`~codegenerator.openapi.neutron.get_schema` method is responsible for +conversion of the `API_DEFINITION` into the jsonschema, but is not able to work +perfectly until additional work is invested. + +Certain additional operations (addRouterInterface, addExtraRoute, ...) are not +having any information available and also need to be hardcoded in the +generator. + + +Octavia +------- + +Octavia is also based on the `pecan` with its dynamic routing, but the +majority of controllers are available for scanning due to the source code +classes hierarchy. To keep the generation process close to generics +:class:`~codegenerator.openapi.octavia.OctaviaGenerator` is constructing +`routes` router from this information and adds few known exceptions. For the +produced routing table generic process is being invoked which is then looking +at the `WSME` decorators attached to the exposed operations. Since `WSME` +schema is not a jsonschema on its own but it can be considered as an +alternative to jsonschema a naive conversion is implemented in +:class:`~codegenerator.openapi.base._convert_wsme_to_jsonschema`. diff --git a/doc/source/osc.rst b/doc/source/osc.rst new file mode 100644 index 0000000..44b3a54 --- /dev/null +++ b/doc/source/osc.rst @@ -0,0 +1,10 @@ +OpenStackClient (python) +======================== + +One of the initial targets of the code generation was to +be able to generate code for OSC knowing what SDK +supports. Current implementation in the generator is +inspecting the OpenStackSDK code and generates CLI code +based on that.
+ +TODO diff --git a/doc/source/rust_cli.rst b/doc/source/rust_cli.rst new file mode 100644 index 0000000..8647b91 --- /dev/null +++ b/doc/source/rust_cli.rst @@ -0,0 +1,22 @@ +OSC in Rust +=========== + +A new experimental CLI is generated from the OpenAPI +specs. Due to the fact that it is fully automatically +generated it differs from the current OpenStackClient in +following aspects: + +- enforced naming convention +- similar UX for all services +- improved performance +- improved UX +- ... + + +Currently the generated code is hosted under +`https://github.com/gtema/openstack` It covers all +services for which OpenAPI specs exist with version +discovery and partial microversion negotiation. + + +TODO diff --git a/doc/source/rust_sdk.rst b/doc/source/rust_sdk.rst new file mode 100644 index 0000000..8c6ef72 --- /dev/null +++ b/doc/source/rust_sdk.rst @@ -0,0 +1,13 @@ +Rust SDK for OpenStack +====================== + + +Having OpenAPI specs of the services it is possible to +generate SDK for Rust automatically. + +Currently the generated code is hosted under +`https://github.com/gtema/openstack` It covers all +services for which OpenAPI specs exist with version +discovery and partial microversion negotiation. + +TODO diff --git a/releasenotes/notes/.placeholder b/releasenotes/notes/.placeholder new file mode 100644 index 0000000..e69de29 diff --git a/releasenotes/source/_static/.placeholder b/releasenotes/source/_static/.placeholder new file mode 100644 index 0000000..e69de29 diff --git a/releasenotes/source/_templates/.placeholder b/releasenotes/source/_templates/.placeholder new file mode 100644 index 0000000..e69de29 diff --git a/releasenotes/source/conf.py b/releasenotes/source/conf.py new file mode 100644 index 0000000..2dcbcff --- /dev/null +++ b/releasenotes/source/conf.py @@ -0,0 +1,302 @@ +# -*- coding: utf-8 -*- +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# OpenStackCodegenerator Release Notes documentation build configuration file, created +# by sphinx-quickstart on Tue Nov 3 17:40:50 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+from sphinx.util import logging + +# According to the discussion in +# https://github.com/sphinx-doc/sphinx/issues/10112 this may be applied as a +# dirty hack until the issue with replacing extlinks is resolved +linklogger = logging.getLogger("sphinx.ext.extlinks") +linklogger.setLevel(40) # Ignore messages less severe than ERROR + +extensions = [ + "openstackdocstheme", + "reno.sphinxext", + "sphinx.ext.extlinks", +] + +# openstackdocstheme options +openstackdocs_repo_name = "openstack/codegenerator" +openstackdocs_use_storyboard = False +openstackdocs_auto_name = False + +# Set aliases for extlinks +# * lpbug - generic Launchpad bug :lpbug:`123456` +# * oscbp - OSC blueprints :oscbp:`Blue Print <bp-name>` +# * oscdoc - OSC Docs :oscdoc:`Comamnd List <command-list>` +extlinks = {} + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# The suffix of source filenames. +source_suffix = ".rst" + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = "OpenStackCodegenerator Release Notes" +copyright = "2024, OpenStackCodegenerator Developers" + +# Release notes are version independent. +# The full version, including alpha/beta/rc tags. +release = "" +# The short X.Y version. +version = "" + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. 
+# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "native" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "openstackdocs" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "display_toc": False, +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. 
They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = "OpenStackCodegeneratorReleaseNotesdoc" + + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). 
+ # 'pointsize': '10pt',
+ # Additional stuff for the LaTeX preamble.
+ # 'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ (
+ "index",
+ "OpenStackCodegeneratorReleaseNotes.tex",
+ "OpenStackCodegenerator Release Notes Documentation",
+ "OpenStackCodegenerator Developers",
+ "manual",
+ )
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+# latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+# latex_use_parts = False
+
+# If true, show page references after internal links.
+# latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+# latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+# latex_appendices = []
+
+# If false, no module index is generated.
+# latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ (
+ "index",
+ "openstackcodegeneratorreleasenotes",
+ "OpenStackCodegenerator Release Notes Documentation",
+ ["OpenStackCodegenerator Developers"],
+ 1,
+ )
+]
+
+# If true, show URL addresses after external links.
+# man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files.
List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ (
+ "index",
+ "OpenStackCodegeneratorReleaseNotes",
+ "OpenStackCodegenerator Release Notes Documentation",
+ "OpenStackCodegenerator Developers",
+ "OpenStackCodegeneratorReleaseNotes",
+ "Generator for OpenStack stuff.",
+ "Miscellaneous",
+ )
+]
+
+# Documents to append as an appendix to all manuals.
+# texinfo_appendices = []
+
+# If false, no module index is generated.
+# texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+# texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+# texinfo_no_detailmenu = False
+
+# -- Options for Internationalization output ------------------------------
+locale_dirs = ["locale/"]
diff --git a/releasenotes/source/index.rst b/releasenotes/source/index.rst
new file mode 100644
index 0000000..b990f32
--- /dev/null
+++ b/releasenotes/source/index.rst
@@ -0,0 +1,16 @@
+====================================
+OpenStackCodegenerator Release Notes
+====================================
+
+.. toctree::
+ :maxdepth: 1
+
+ unreleased
+
+OpenStack Releases
+------------------
+
+Further details for historical OpenStack releases are found at the
+`OpenStack Releases`_ page.
+
+.. _`OpenStack Releases`: http://releases.openstack.org/
diff --git a/releasenotes/source/unreleased.rst b/releasenotes/source/unreleased.rst
new file mode 100644
index 0000000..cb3cccf
--- /dev/null
+++ b/releasenotes/source/unreleased.rst
@@ -0,0 +1,5 @@
+=====================
+Current Release Notes
+=====================
+
+..
release-notes:: diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..3bbabbf --- /dev/null +++ b/requirements.txt @@ -0,0 +1,16 @@ +Jinja2>=3.0 # BSD +jsonref>=1.0 # MIT +openapi_core>=0.17 # BSD +pydantic>=2.6 # MIT +ruamel.yaml>=0.18 # MIT +jsonschema>=4.19 # MIT +beautifulsoup4>=4.0 # MIT +# convert html into markdown +markdownify>=0.11 # MIT +# wrap markdown when rendering +mdformat>=0.7 # MIT +# Get sphinx docstrings from source code +sphinx>=2.0.0,!=2.1.0 # BSD +wsgi-intercept>=1.13 # MIT +oslotest>=5.0.0 # Apache-2.0 +openstacksdk>=2.0 # Apache-2.0 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..570e72c --- /dev/null +++ b/setup.cfg @@ -0,0 +1,55 @@ +[metadata] +name = openstack-codegenerator +summary = Generator for OpenStack stuff +description_file = + README.rst +author = OpenStack +author_email = openstack-discuss@lists.openstack.org +home_page = https://docs.openstack.org/openstacksdk/ +classifier = + Environment :: OpenStack + Intended Audience :: Information Technology + Intended Audience :: System Administrators + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + +python_requires = >=3.10 + +[files] +packages = + codegenerator + +[entry_points] +console_scripts = + openstack-codegenerator = codegenerator.cli:main + +[extras] +block-storage = + cinder>=23.0 +compute = + nova>=28.0 +identity = + keystone>=23.0 +image = + glance>=27.0 +load-balancing = + octavia>=11.0 +network = + neutron>=23.1 + neutron-vpnaas>=23.0 +placement = + openstack-placement>=10.0 + +[mypy] +show_column_numbers = true +show_error_context = true +ignore_missing_imports = true +# follow_imports = normal +follow_imports = skip +incremental = true +check_untyped_defs = true +warn_unused_ignores = true diff --git a/setup.py b/setup.py new 
file mode 100644 index 0000000..df328c3 --- /dev/null +++ b/setup.py @@ -0,0 +1,17 @@ +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or +# implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# THIS FILE IS MANAGED BY THE GLOBAL REQUIREMENTS REPO - DO NOT EDIT +import setuptools + +setuptools.setup(setup_requires=["pbr>=2.0.0"], pbr=True) diff --git a/test-requirements.txt b/test-requirements.txt new file mode 100644 index 0000000..f5f8672 --- /dev/null +++ b/test-requirements.txt @@ -0,0 +1,2 @@ +flake8-import-order==0.18.2 # LGPLv3 +stestr>=1.0.0 # Apache-2.0 diff --git a/tools/generate_openapi_specs.sh b/tools/generate_openapi_specs.sh new file mode 100755 index 0000000..4d31d73 --- /dev/null +++ b/tools/generate_openapi_specs.sh @@ -0,0 +1,30 @@ +#!/usr/bin/bash -e +# Generate OpenAPI specs for all supported services consuming built API-REFs in the corresponding checkouts + +SERVICE=$1 + +API_REF_BUILD_ROOT=~/workspace/opendev/openstack + +if [ -z "$1" -o "$1" = "compute" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type compute --api-ref-src ${API_REF_BUILD_ROOT}/nova/api-ref/build/html/index.html +fi +if [ -z "$1" -o "$1" = "network" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type network --api-ref-src ${API_REF_BUILD_ROOT}/neutron-lib/api-ref/build/html/v2/index.html +fi +if [ -z "$1" -o "$1" = "block-storage" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type volume --api-ref-src 
${API_REF_BUILD_ROOT}/cinder/api-ref/build/html/v3/index.html +fi +if [ -z "$1" -o "$1" = "image" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type image --api-ref-src ${API_REF_BUILD_ROOT}/glance/api-ref/build/html/v2/index.html + sed -i "s|\[API versions call\](../versions/index.html#versions-call)|API versions call|g" wrk/openapi_specs/image/v2.yaml +fi +if [ -z "$1" -o "$1" = "identity" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type identity --api-ref-src ${API_REF_BUILD_ROOT}/keystone/api-ref/build/html/v3/index.html +fi +if [ -z "$1" -o "$1" = "load-balancing" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type load-balancing --api-ref-src ${API_REF_BUILD_ROOT}/octavia/api-ref/build/html/v2/index.html +fi +if [ -z "$1" -o "$1" = "placement" ]; then + openstack-codegenerator --work-dir wrk --target openapi-spec --service-type placement --api-ref-src ${API_REF_BUILD_ROOT}/placement/api-ref/build/html/index.html + sed -i "s/(?expanded=delete-resource-provider-inventories-detail#delete-resource-provider-inventories)//" wrk/openapi_specs/placement/v1.yaml +fi diff --git a/tools/generate_rust.sh b/tools/generate_rust.sh new file mode 100755 index 0000000..acfdff3 --- /dev/null +++ b/tools/generate_rust.sh @@ -0,0 +1,13 @@ +#!/usr/bin/bash -e + +openstack-codegenerator --work-dir metadata --target metadata --openapi-yaml-spec wrk/openapi_specs/block-storage/v3.yaml --service-type block-storage +openstack-codegenerator --work-dir metadata --target metadata --openapi-yaml-spec wrk/openapi_specs/compute/v2.yaml --service-type compute +openstack-codegenerator --work-dir metadata --target metadata --openapi-yaml-spec wrk/openapi_specs/identity/v3.yaml --service-type identity +openstack-codegenerator --work-dir metadata --target metadata --openapi-yaml-spec wrk/openapi_specs/image/v2.yaml --service-type image +openstack-codegenerator --work-dir metadata --target 
metadata --openapi-yaml-spec wrk/openapi_specs/network/v2.yaml --service-type network + +tools/generate_rust_block_storage.sh +tools/generate_rust_compute.sh +tools/generate_rust_identity.sh +tools/generate_rust_image.sh +tools/generate_rust_network.sh diff --git a/tools/generate_rust_block_storage.sh b/tools/generate_rust_block_storage.sh new file mode 100755 index 0000000..b5f9a18 --- /dev/null +++ b/tools/generate_rust_block_storage.sh @@ -0,0 +1,20 @@ +#!/usr/bin/bash -e +# + +WRK_DIR=wrk +METADATA=metadata +DST=~/workspace/github/gtema/openstack +NET_RESOURCES=( + "volume" + "type" +) + +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-sdk --metadata ${METADATA}/block-storage_metadata.yaml --service block-storage +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-cli --metadata ${METADATA}/block-storage_metadata.yaml --service block-storage + + +for resource in "${NET_RESOURCES[@]}"; do + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/block_storage/v3/${resource}" ${DST}/openstack_sdk/src/api/block_storage/v3 + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/block_storage/v3/${resource}.rs" ${DST}/openstack_sdk/src/api/block_storage/v3 + cp -av "${WRK_DIR}/rust/openstack_cli/src/block_storage/v3/${resource}" ${DST}/openstack_cli/src/block_storage/v3 +done; diff --git a/tools/generate_rust_compute.sh b/tools/generate_rust_compute.sh new file mode 100755 index 0000000..b02acc6 --- /dev/null +++ b/tools/generate_rust_compute.sh @@ -0,0 +1,23 @@ +#!/usr/bin/bash -e + +WRK_DIR=wrk +METADATA=metadata +DST=~/workspace/github/gtema/openstack +NET_RESOURCES=( + "extension" + "flavor" + "aggregate" + "availability_zone" + "hypervisor" + "keypair" + "server" +) + +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-sdk --metadata ${METADATA}/compute_metadata.yaml --service compute +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-cli --metadata ${METADATA}/compute_metadata.yaml --service compute + +for resource in 
"${NET_RESOURCES[@]}"; do + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/compute/v2/${resource}" ${DST}/openstack_sdk/src/api/compute/v2 + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/compute/v2/${resource}.rs" ${DST}/openstack_sdk/src/api/compute/v2 + cp -av "${WRK_DIR}/rust/openstack_cli/src/compute/v2/${resource}" ${DST}/openstack_cli/src/compute/v2 +done; diff --git a/tools/generate_rust_identity.sh b/tools/generate_rust_identity.sh new file mode 100755 index 0000000..28a703e --- /dev/null +++ b/tools/generate_rust_identity.sh @@ -0,0 +1,28 @@ +#!/usr/bin/bash -e + +WRK_DIR=wrk +METADATA=metadata +DST=~/workspace/github/gtema/openstack +NET_RESOURCES=( + "auth" + "group" + "os_federation" + "endpoint" + "region" + "role_assignment" + "role_inference" + "role" + "service" + "project" + "user" +) + +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-sdk --metadata ${METADATA}/identity_metadata.yaml --service identity +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-cli --metadata ${METADATA}/identity_metadata.yaml --service identity + + +for resource in "${NET_RESOURCES[@]}"; do + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/identity/v3/${resource}" ${DST}/openstack_sdk/src/api/identity/v3 + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/identity/v3/${resource}.rs" ${DST}/openstack_sdk/src/api/identity/v3 + cp -av "${WRK_DIR}/rust/openstack_cli/src/identity/v3/${resource}" ${DST}/openstack_cli/src/identity/v3 +done; diff --git a/tools/generate_rust_image.sh b/tools/generate_rust_image.sh new file mode 100755 index 0000000..cd12e71 --- /dev/null +++ b/tools/generate_rust_image.sh @@ -0,0 +1,18 @@ +#!/usr/bin/bash -e + +WRK_DIR=wrk +METADATA=metadata +DST=~/workspace/github/gtema/openstack +NET_RESOURCES=( + "image" + "schema" +) + +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-sdk --metadata ${METADATA}/image_metadata.yaml --service image +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-cli --metadata 
${METADATA}/image_metadata.yaml --service image + +for resource in "${NET_RESOURCES[@]}"; do + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/image/v2/${resource}" ${DST}/openstack_sdk/src/api/image/v2 + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/image/v2/${resource}.rs" ${DST}/openstack_sdk/src/api/image/v2 + cp -av "${WRK_DIR}/rust/openstack_cli/src/image/v2/${resource}" ${DST}/openstack_cli/src/image/v2 +done; diff --git a/tools/generate_rust_network.sh b/tools/generate_rust_network.sh new file mode 100755 index 0000000..6569f29 --- /dev/null +++ b/tools/generate_rust_network.sh @@ -0,0 +1,27 @@ +#!/usr/bin/bash -e + +WRK_DIR=wrk +METADATA=metadata +DST=~/workspace/github/gtema/openstack +NET_RESOURCES=( + "availability_zone" + "extension" + "floatingip" + "network" + "port" + "router" + "subnet" +) + +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-sdk --metadata ${METADATA}/network_metadata.yaml --service network +openstack-codegenerator --work-dir ${WRK_DIR} --target rust-cli --metadata ${METADATA}/network_metadata.yaml --service network + + +for resource in "${NET_RESOURCES[@]}"; do +# openstack-codegenerator --work-dir ${WRK_DIR} --target rust-sdk --metadata ${METADATA}/network_metadata.yaml --service network # --resource ${resource} +# openstack-codegenerator --work-dir ${WRK_DIR} --target rust-cli --metadata ${METADATA}/network_metadata.yaml --service network # --resource ${resource} + + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/network/v2/${resource}" ${DST}/openstack_sdk/src/api/network/v2 + cp -av "${WRK_DIR}/rust/openstack_sdk/src/api/network/v2/${resource}.rs" ${DST}/openstack_sdk/src/api/network/v2 + cp -av "${WRK_DIR}/rust/openstack_cli/src/network/v2/${resource}" ${DST}/openstack_cli/src/network/v2 +done; diff --git a/tools/generate_rust_swift.sh b/tools/generate_rust_swift.sh new file mode 100755 index 0000000..b8a4b10 --- /dev/null +++ b/tools/generate_rust_swift.sh @@ -0,0 +1,47 @@ +#!/usr/bin/bash -e +# +DATA=( +# Object 
Store +# sdk +# account +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.get --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name account" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.head --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name account" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.post --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name account" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.delete --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name account" +# container +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.get --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name container" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.head --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name container" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.put --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name container" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.post --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name container" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.delete --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name container" +# object +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.head --target rust-sdk --service-type 
object-store --api-version v1 --alternative-target-name object" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.get --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name object" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.put --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name object" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.post --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name object" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.delete --target rust-sdk --service-type object-store --api-version v1 --alternative-target-name object" + +# cli +# account +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.head --target rust-cli --service-type object-store --api-version v1 --alternative-target-name account" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.post --target rust-cli --service-type object-store --api-version v1 --command-type set --alternative-target-name account" + +# container +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id account.get --target rust-cli --service-type object-store --api-version v1 --alternative-target-name container --sdk-mod-path account::get --command-type list" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.head --target rust-cli --service-type object-store --api-version v1 --alternative-target-name container --command-type show" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.post --target rust-cli --service-type object-store --api-version v1 --alternative-target-name container --command-type set" 
+"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.put --target rust-cli --service-type object-store --api-version v1 --alternative-target-name container --command-type create" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.delete --target rust-cli --service-type object-store --api-version v1 --alternative-target-name container --command-type delete" + +#object +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id container.get --target rust-cli --service-type object-store --api-version v1 --alternative-target-name object --sdk-mod-path container::get --command-type list" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.put --target rust-cli --service-type object-store --api-version v1 --alternative-target-name object --command-type upload" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.get --target rust-cli --service-type object-store --api-version v1 --alternative-target-name object --command-type download" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.head --target rust-cli --service-type object-store --api-version v1 --alternative-target-name object --command-type show" +"--openapi-yaml-spec openapi_specs/object-store/swift.spec.yaml --openapi-operation-id object.delete --target rust-cli --service-type object-store --api-version v1 --alternative-target-name object --command-type delete" +) + +for item in "${DATA[@]}"; do + python codegenerator/cli.py $item --work-dir wrk +done; + diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..fcd0683 --- /dev/null +++ b/tox.ini @@ -0,0 +1,79 @@ +[tox] +minversion = 4.3.0 +envlist = pep8,py311 +ignore_basepython_conflict=True + +[testenv] +description = + Run unit tests. 
+package = editable +passenv = +setenv = + LANG=en_US.UTF-8 + LANGUAGE=en_US:en +install_command = python -I -m pip install -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} {opts} {packages} +deps = + -r{toxinidir}/test-requirements.txt + -r{toxinidir}/requirements.txt +commands = + # A very dirty hack to bypass dependency conflict of openapi-core/jsonchema/referencing + pip install -U openapi-core + stestr run {posargs} + stestr slowest + +[testenv:functional{,-py310,-py311}] +description = + Run functional tests. +setenv = + {[testenv]setenv} +commands = + # Generators conflict with each other since every service is screwing + # OsloConfig to its own flavor and I have not found a working way to deal with + # that except of physically isolating them + stestr --test-path ./codegenerator/tests/functional/ run {posargs} + stestr slowest + +[testenv:docs{,-py310,-py311}] +description = + Build documentation in HTML format. +# We do not want the package (because of heavy dependencies for docs) +skip_install = True +deps = + -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} + -r{toxinidir}/doc/requirements.txt +commands = + sphinx-build -W --keep-going -b html -j auto doc/source/ doc/build/html + +[testenv:releasenotes{,-py310,-py311}] +deps = + -c{env:TOX_CONSTRAINTS_FILE:https://releases.openstack.org/constraints/upper/master} + -r{toxinidir}/doc/requirements.txt +skip_install = True +commands = + sphinx-build -a -E -W -d releasenotes/build/doctrees -b html releasenotes/source releasenotes/build/html + +[testenv:pep8] +description = + Run style checks. +deps = + pre-commit +commands = + pre-commit run --all-files --show-diff-on-failure + +[flake8] +application-import-names = codegenerator +# The following are ignored on purpose. It's not super worth it to fix them. 
+# However, if you feel strongly about it, patches will be accepted to fix them
+# if they fix ALL of the occurrences of one and only one of them.
+# E203 Black will put spaces after colons in list comprehensions
+# E501 Black takes care of line length for us
+# H238 New Style Classes are the default in Python3
+# H301 Black will put commas after imports that can't fit on one line
+# H4 Are about docstrings and there's just a huge pile of pre-existing issues.
+# W503 Is supposed to be off by default but in the latest pycodestyle isn't.
+# Also, both openstacksdk and Donald Knuth disagree with the rule. Line
+# breaks should occur before the binary operator for readability.
+ignore = E203, E501, H301, H238, H4, W503
+import-order-style = pep8
+show-source = True
+exclude = .venv,.git,.tox,dist,doc,*lib/python*,*egg,build,
diff --git a/zuul.d/project.yaml b/zuul.d/project.yaml
new file mode 100644
index 0000000..00cd7ad
--- /dev/null
+++ b/zuul.d/project.yaml
@@ -0,0 +1,8 @@
+---
+- project:
+ templates:
+ - publish-openstack-docs-pti
+ check:
+ jobs:
+ - openstack-tox-pep8
+ - openstack-tox-py311