Fix pyproject.toml file name

- properly rename the pyproject.toml file
- reformat files, since the formatting rules were not being applied before
- fix the unit tests that now fail as a result

Change-Id: I189a0c71253fe87c5fa91f6d7f46fb350fe4d0d9
Artem Goncharov 2024-09-18 16:57:45 +02:00
parent ca4120cddd
commit 2326ce5e7b
66 changed files with 1923 additions and 1838 deletions
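Most of the diff below applies mechanical style changes: dict(...) and set([...]) constructor calls become literals, %-style string formatting becomes f-strings, and long lines are re-wrapped. A minimal sketch of the before/after pattern follows; the function and variable names in it are illustrative only, not taken from the diff.

# Illustrative sketch of the formatting rules applied in this commit;
# the names below are hypothetical examples, not lines from the diff.
import logging


def build_context(target_dir: str) -> dict:
    # Before: logging.debug("Generating code in %s" % target_dir)
    logging.debug(f"Generating code in {target_dir}")

    # Before: context = dict(path=target_dir, tags=set([]))
    context = {"path": target_dir, "tags": set()}
    return context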

View File

@ -44,21 +44,21 @@ class AnsibleGenerator(BaseGenerator):
def generate(self, res, target_dir, args=None):
"""Generate code for the Ansible"""
logging.debug("Generating Ansible code in %s" % target_dir)
logging.debug(f"Generating Ansible code in {target_dir}")
ansible_path = ["plugins", "modules"]
context = dict(
res=res.resource_class,
sdk_mod_name=res.mod_name,
class_name=res.class_name,
resource_name=res.class_name.lower(),
sdk_service_name=res.service_name,
proxy=res.proxy_obj,
fqcn=res.fqcn,
registry_name=res.registry_name,
attrs=res.attrs,
target_name=res.class_name.lower(),
)
context = {
"res": res.resource_class,
"sdk_mod_name": res.mod_name,
"class_name": res.class_name,
"resource_name": res.class_name.lower(),
"sdk_service_name": res.service_name,
"proxy": res.proxy_obj,
"fqcn": res.fqcn,
"registry_name": res.registry_name,
"attrs": res.attrs,
"target_name": res.class_name.lower(),
}
if args and args.alternative_target_name:
context["target_name"] = args.alternative_target_name
context["ansible_module_name"] = "".join(
@ -96,6 +96,4 @@ class AnsibleGenerator(BaseGenerator):
# Format rendered code to have less flake complains. This will still
# not guarantee code is fitting perfect, since there might be too long
# lines
self._format_code(
Path(work_dir, "/".join(ansible_path)),
)
self._format_code(Path(work_dir, "/".join(ansible_path)))

View File

@ -49,7 +49,7 @@ class BaseGenerator:
content = template.render(**context)
dest.mkdir(parents=True, exist_ok=True)
with open(Path(dest, fname), "w") as fp:
logging.debug("Writing %s" % (fp.name))
logging.debug(f"Writing {fp.name}")
fp.write(content)
def _format_code(self, *args):

View File

@ -39,14 +39,16 @@ class ResourceProcessor:
def __init__(self, mod_name, class_name):
self.mod_name = mod_name
self.class_name = class_name
self.class_plural_name = class_name + "s" if class_name[:-1] != "y" else "ies"
self.class_plural_name = (
class_name + "s" if class_name[:-1] != "y" else "ies"
)
spec = importlib.util.find_spec(self.mod_name)
if not spec:
raise RuntimeError("Module %s not found" % self.mod_name)
raise RuntimeError(f"Module {self.mod_name} not found")
self.module = importlib.util.module_from_spec(spec)
if not self.module:
raise RuntimeError("Error loading module %s" % self.mod_name)
raise RuntimeError(f"Error loading module {self.mod_name}")
sys.modules[self.mod_name] = self.module
if not spec.loader:
raise RuntimeError("No module loader available")
@ -58,10 +60,10 @@ class ResourceProcessor:
proxy_mod_name = srv_ver_mod + "._proxy"
proxy_spec = importlib.util.find_spec(proxy_mod_name)
if not proxy_spec:
raise RuntimeError("Module %s not found" % proxy_mod_name)
raise RuntimeError(f"Module {proxy_mod_name} not found")
self.proxy_mod = importlib.util.module_from_spec(proxy_spec)
if not self.proxy_mod:
raise RuntimeError("Error loading module %s" % proxy_mod_name)
raise RuntimeError(f"Error loading module {proxy_mod_name}")
sys.modules[proxy_mod_name] = self.proxy_mod
if not proxy_spec.loader:
raise RuntimeError("No module loader available")
@ -81,7 +83,7 @@ class ResourceProcessor:
):
self.registry_name = f"{self.service_name}.{k}"
self.attrs = dict()
self.attrs = {}
self.process()
def process(self):
@ -95,7 +97,7 @@ class ResourceProcessor:
doc = "Name"
elif not doc and k == "tags":
doc = f"{self.class_name} Tags."
self.attrs[k] = dict(attr=v, docs=doc)
self.attrs[k] = {"attr": v, "docs": doc}
def get_attr_docs(self):
mod = pycode.ModuleAnalyzer.for_module(self.mod_name)
@ -121,11 +123,13 @@ class Generator:
def get_openapi_spec(self, path: Path):
logging.debug("Fetch %s", path)
if path.as_posix() not in self.schemas:
self.schemas[path.as_posix()] = common.get_openapi_spec(path.as_posix())
self.schemas[path.as_posix()] = common.get_openapi_spec(
path.as_posix()
)
return self.schemas[path.as_posix()]
def load_metadata(self, path: Path):
with open(path, "r") as fp:
with open(path) as fp:
data = yaml.safe_load(fp)
self.metadata = Metadata(**data)
@ -158,45 +162,30 @@ def main():
],
help="Target for which to generate code",
)
parser.add_argument("--work-dir", help="Working directory for the generated code")
parser.add_argument(
"--alternative-module-path",
help=("Optional new module path"),
"--work-dir", help="Working directory for the generated code"
)
parser.add_argument(
"--alternative-module-path", help=("Optional new module path")
)
parser.add_argument(
"--alternative-module-name",
help=("Optional new module name " "(rename get into list)"),
)
parser.add_argument(
"--openapi-yaml-spec",
help=("Path to the OpenAPI spec file (yaml)"),
)
parser.add_argument(
"--openapi-operation-id",
help=("OpenAPI operationID"),
)
parser.add_argument(
"--service-type",
help=("Catalog service type"),
"--openapi-yaml-spec", help=("Path to the OpenAPI spec file (yaml)")
)
parser.add_argument("--openapi-operation-id", help=("OpenAPI operationID"))
parser.add_argument("--service-type", help=("Catalog service type"))
parser.add_argument(
"--api-version",
help=("Api version (used in path for resulting code, i.e. v1)"),
)
parser.add_argument(
"--metadata",
help=("Metadata file to load"),
)
parser.add_argument(
"--service",
help=("Metadata service name filter"),
)
parser.add_argument(
"--resource",
help=("Metadata resource name filter"),
)
parser.add_argument("--metadata", help=("Metadata file to load"))
parser.add_argument("--service", help=("Metadata service name filter"))
parser.add_argument("--resource", help=("Metadata resource name filter"))
parser.add_argument(
"--validate",
action="store_true",
@ -246,11 +235,13 @@ def main():
openapi_spec = generator.get_openapi_spec(
Path(
# metadata_path.parent,
op_data.spec_file or res_data.spec_file,
op_data.spec_file or res_data.spec_file
).resolve()
)
for mod_path, mod_name, path in generators[args.target].generate(
for mod_path, mod_name, path in generators[
args.target
].generate(
res,
args.work_dir,
openapi_spec=openapi_spec,
@ -278,16 +269,19 @@ def main():
)
if args.target == "rust-sdk" and not args.resource:
resource_results: dict[str, dict] = dict()
resource_results: dict[str, dict] = {}
for mod_path, mod_name, path in res_mods:
mn = "/".join(mod_path)
x = resource_results.setdefault(mn, {"path": path, "mods": set()})
x = resource_results.setdefault(
mn, {"path": path, "mods": set()}
)
x["mods"].add(mod_name)
changed = True
while changed:
changed = False
for mod_path in [
mod_path_str.split("/") for mod_path_str in resource_results.keys()
mod_path_str.split("/")
for mod_path_str in resource_results.keys()
]:
if len(mod_path) < 3:
continue

View File

@ -35,7 +35,9 @@ FQAN_ALIAS_MAP = {
}
def _deep_merge(dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
def _deep_merge(
dict1: dict[Any, Any], dict2: dict[Any, Any]
) -> dict[Any, Any]:
result = dict1.copy()
for key, value in dict2.items():
if key in result:
@ -51,7 +53,7 @@ def _deep_merge(dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
class BasePrimitiveType(BaseModel):
lifetimes: set[str] | None = None
builder_macros: set[str] = set([])
builder_macros: set[str] = set()
class BaseCombinedType(BaseModel):
@ -70,7 +72,7 @@ class BaseCompoundType(BaseModel):
def get_openapi_spec(path: str | Path):
"""Load OpenAPI spec from a file"""
with open(path, "r") as fp:
with open(path) as fp:
spec_data = jsonref.replace_refs(yaml.safe_load(fp), proxies=False)
return Spec.from_dict(spec_data)
@ -83,7 +85,7 @@ def find_openapi_operation(spec, operationId: str):
continue
if method_spec.get("operationId") == operationId:
return (path, method, method_spec)
raise RuntimeError("Cannot find operation %s specification" % operationId)
raise RuntimeError(f"Cannot find operation {operationId} specification")
def get_plural_form(resource: str) -> str:
@ -171,10 +173,14 @@ def find_resource_schema(
elif "properties" in schema:
schema["type"] = "object"
else:
raise RuntimeError("No type in %s" % schema)
raise RuntimeError(f"No type in {schema}")
schema_type = schema["type"]
if schema_type == "array":
if parent and resource_name and parent == get_plural_form(resource_name):
if (
parent
and resource_name
and parent == get_plural_form(resource_name)
):
items = schema["items"]
if (
items.get("type") == "object"
@ -185,7 +191,9 @@ def find_resource_schema(
return (items["properties"][resource_name], parent)
else:
return (items, parent)
elif not parent and schema.get("items", {}).get("type") == "object":
elif (
not parent and schema.get("items", {}).get("type") == "object"
):
# Array on the top level. Most likely we are searching for items
# directly
return (schema["items"], None)
@ -228,7 +236,9 @@ def find_resource_schema(
else:
return (schema, None)
except Exception as ex:
logging.exception(f"Caught exception {ex} during processing of {schema}")
logging.exception(
f"Caught exception {ex} during processing of {schema}"
)
raise
return (None, None)
@ -275,7 +285,9 @@ def find_response_schema(
for candidate in oneof:
if (
action_name
and candidate.get("x-openstack", {}).get("action-name")
and candidate.get("x-openstack", {}).get(
"action-name"
)
== action_name
):
if response_key in candidate.get("properties", {}):
@ -363,17 +375,24 @@ def get_operation_variants(spec: dict, operation_name: str):
variant_spec = variant.get("x-openstack", {})
if variant_spec.get("action-name") == operation_name:
discriminator = variant_spec.get("discriminator")
if "oneOf" in variant and discriminator == "microversion":
if (
"oneOf" in variant
and discriminator == "microversion"
):
logging.debug(
"Microversion discriminator for action bodies"
)
for subvariant in variant["oneOf"]:
subvariant_spec = subvariant.get("x-openstack", {})
subvariant_spec = subvariant.get(
"x-openstack", {}
)
operation_variants.append(
{
"body": subvariant,
"mode": "action",
"min-ver": subvariant_spec.get("min-ver"),
"min-ver": subvariant_spec.get(
"min-ver"
),
"mime_type": mime_type,
}
)
@ -392,8 +411,7 @@ def get_operation_variants(spec: dict, operation_name: str):
break
if not operation_variants:
raise RuntimeError(
"Cannot find body specification for action %s"
% operation_name
f"Cannot find body specification for action {operation_name}"
)
else:
operation_variants.append(
@ -527,20 +545,14 @@ def get_resource_names_from_url(path: str):
def get_rust_sdk_mod_path(service_type: str, api_version: str, path: str):
"""Construct mod path for rust sdk"""
mod_path = [
service_type.replace("-", "_"),
api_version,
]
mod_path = [service_type.replace("-", "_"), api_version]
mod_path.extend([x.lower() for x in get_resource_names_from_url(path)])
return mod_path
def get_rust_cli_mod_path(service_type: str, api_version: str, path: str):
"""Construct mod path for rust sdk"""
mod_path = [
service_type.replace("-", "_"),
api_version,
]
mod_path = [service_type.replace("-", "_"), api_version]
mod_path.extend([x.lower() for x in get_resource_names_from_url(path)])
return mod_path

View File

@ -30,8 +30,8 @@ class Boolean(BasePrimitiveType):
"""Basic Boolean"""
type_hint: str = "bool"
imports: set[str] = set([])
clap_macros: set[str] = set(["action=clap::ArgAction::Set"])
imports: set[str] = set()
clap_macros: set[str] = {"action=clap::ArgAction::Set"}
original_data_type: BaseCompoundType | BaseCompoundType | None = None
def get_sample(self):
@ -40,7 +40,7 @@ class Boolean(BasePrimitiveType):
class Number(BasePrimitiveType):
format: str | None = None
imports: set[str] = set([])
imports: set[str] = set()
clap_macros: set[str] = set()
original_data_type: BaseCompoundType | BaseCompoundType | None = None
@ -59,7 +59,7 @@ class Number(BasePrimitiveType):
class Integer(BasePrimitiveType):
format: str | None = None
imports: set[str] = set([])
imports: set[str] = set()
clap_macros: set[str] = set()
original_data_type: BaseCompoundType | BaseCompoundType | None = None
@ -77,8 +77,8 @@ class Integer(BasePrimitiveType):
class Null(BasePrimitiveType):
type_hint: str = "Value"
imports: set[str] = set(["serde_json::Value"])
builder_macros: set[str] = set([])
imports: set[str] = {"serde_json::Value"}
builder_macros: set[str] = set()
clap_macros: set[str] = set()
original_data_type: BaseCompoundType | BaseCompoundType | None = None
@ -89,13 +89,13 @@ class Null(BasePrimitiveType):
class String(BasePrimitiveType):
format: str | None = None
type_hint: str = "String"
builder_macros: set[str] = set(["setter(into)"])
builder_macros: set[str] = {"setter(into)"}
# NOTE(gtema): it is not possible to override field with computed
# property, thus it must be a property here
@property
def imports(self) -> set[str]:
return set([])
return set()
def get_sample(self):
return '"foo"'
@ -103,14 +103,14 @@ class String(BasePrimitiveType):
class JsonValue(BasePrimitiveType):
type_hint: str = "Value"
builder_macros: set[str] = set(["setter(into)"])
builder_macros: set[str] = {"setter(into)"}
def get_sample(self):
return "json!({})"
@property
def imports(self):
imports: set[str] = set(["serde_json::Value"])
imports: set[str] = {"serde_json::Value"}
return imports
@ -133,7 +133,7 @@ class Option(BaseCombinedType):
@property
def builder_macros(self):
macros = set(["setter(into)"])
macros = {"setter(into)"}
wrapped_macros = self.item_type.builder_macros
if "private" in wrapped_macros:
macros = wrapped_macros
@ -165,7 +165,7 @@ class Array(BaseCombinedType):
@property
def builder_macros(self):
macros = set(["setter(into)"])
macros = {"setter(into)"}
return macros
def get_sample(self):
@ -194,7 +194,7 @@ class CommaSeparatedList(BaseCombinedType):
@property
def imports(self):
imports: set[str] = set([])
imports: set[str] = set()
imports.update(self.item_type.imports)
return imports
@ -205,7 +205,7 @@ class CommaSeparatedList(BaseCombinedType):
class BTreeSet(BaseCombinedType):
item_type: BasePrimitiveType | BaseCombinedType | BaseCompoundType
builder_macros: set[str] = set(["setter(into)"])
builder_macros: set[str] = {"setter(into)"}
@property
def type_hint(self):
@ -253,11 +253,13 @@ class Struct(BaseCompoundType):
@property
def type_hint(self):
return self.name + (f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "")
return self.name + (
f"<{', '.join(self.lifetimes)}>" if self.lifetimes else ""
)
@property
def imports(self):
imports: set[str] = set([])
imports: set[str] = set()
field_types = [x.data_type for x in self.fields.values()]
if len(field_types) > 1 or (
len(field_types) == 1
@ -313,7 +315,9 @@ class Enum(BaseCompoundType):
@property
def type_hint(self):
return self.name + (f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "")
return self.name + (
f"<{', '.join(self.lifetimes)}>" if self.lifetimes else ""
)
@property
def imports(self):
@ -340,9 +344,11 @@ class Enum(BaseCompoundType):
class StringEnum(BaseCompoundType):
base_type: str = "enum"
variants: dict[str, set[str]] = {}
imports: set[str] = set(["serde::Deserialize", "serde::Serialize"])
imports: set[str] = {"serde::Deserialize", "serde::Serialize"}
lifetimes: set[str] = set()
derive_container_macros: str = "#[derive(Debug, Deserialize, Clone, Serialize)]"
derive_container_macros: str = (
"#[derive(Debug, Deserialize, Clone, Serialize)]"
)
builder_container_macros: str | None = None
serde_container_macros: str | None = None # "#[serde(untagged)]"
serde_macros: set[str] | None = None
@ -365,12 +371,12 @@ class StringEnum(BaseCompoundType):
def get_sample(self):
"""Generate sample data"""
variant = list(sorted(self.variants.keys()))[0]
variant = sorted(self.variants.keys())[0]
return f"{self.name}::{variant}"
def variant_serde_macros(self, variant: str):
"""Return serde macros"""
macros = set([])
macros = set()
vals = self.variants[variant]
if len(vals) > 1:
macros.add(f'rename(serialize = "{sorted(vals)[0]}")')
@ -484,7 +490,9 @@ class TypeManager:
def get_local_attribute_name(self, name: str) -> str:
"""Get localized attribute name"""
name = name.replace(".", "_")
attr_name = "_".join(x.lower() for x in re.split(common.SPLIT_NAME_RE, name))
attr_name = "_".join(
x.lower() for x in re.split(common.SPLIT_NAME_RE, name)
)
if attr_name in ["type", "self", "enum", "ref", "default"]:
attr_name = f"_{attr_name}"
return attr_name
@ -502,7 +510,8 @@ class TypeManager:
if not model_ref:
return "Request"
name = "".join(
x.capitalize() for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
x.capitalize()
for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
)
return name
@ -510,15 +519,16 @@ class TypeManager:
for model_ in self.models:
if model_.reference == model_ref:
return model_
raise RuntimeError("Cannot find reference %s" % model_ref)
raise RuntimeError(f"Cannot find reference {model_ref}")
def convert_model(
self,
type_model: model.PrimitiveType | model.ADT | model.Reference,
self, type_model: model.PrimitiveType | model.ADT | model.Reference
) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType:
"""Get local destination type from the ModelType"""
# logging.debug("Get RustSDK type for %s", type_model)
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = None
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = (
None
)
model_ref: model.Reference | None = None
if isinstance(type_model, model.Reference):
model_ref = type_model
@ -530,7 +540,7 @@ class TypeManager:
# Primitive
xtyp = self.primitive_type_mapping.get(type_model.__class__)
if not xtyp:
raise RuntimeError("No mapping for %s" % type_model)
raise RuntimeError(f"No mapping for {type_model}")
return xtyp(**type_model.model_dump())
# Composite/Compound type
@ -573,11 +583,13 @@ class TypeManager:
# TODO(gtema): make parent nullable or add "null"
# as enum value
type_model.literals.remove(None)
for lit in set(x.lower() for x in type_model.literals):
for lit in {x.lower() for x in type_model.literals}:
val = "".join(
[
x.capitalize()
for x in re.split(common.SPLIT_NAME_RE, lit)
for x in re.split(
common.SPLIT_NAME_RE, lit
)
]
)
if val and val[0].isdigit():
@ -592,9 +604,13 @@ class TypeManager:
variants=variants,
)
except Exception:
logging.exception("Error processing enum: %s", type_model)
logging.exception(
"Error processing enum: %s", type_model
)
elif base_type is model.ConstraintInteger:
typ = self.primitive_type_mapping[model.ConstraintInteger]()
typ = self.primitive_type_mapping[
model.ConstraintInteger
]()
elif base_type is model.ConstraintNumber:
typ = self.primitive_type_mapping[model.ConstraintNumber]()
elif base_type is model.PrimitiveBoolean:
@ -602,8 +618,7 @@ class TypeManager:
if not typ:
raise RuntimeError(
"Cannot map model type %s to Rust type [%s]"
% (type_model.__class__.__name__, type_model)
f"Cannot map model type {type_model.__class__.__name__} to Rust type [{type_model}]"
)
if not model_ref:
@ -673,7 +688,9 @@ class TypeManager:
if item_type.__class__ == lt.item_type.__class__:
result_data_type = self.data_type_mapping[model.Array](
item_type=item_type,
description=sanitize_rust_docstrings(type_model.description),
description=sanitize_rust_docstrings(
type_model.description
),
)
# logging.debug("Replacing Typ + list[Typ] with list[Typ]")
elif len(kinds) == 1:
@ -702,7 +719,9 @@ class TypeManager:
result_data_type.kinds[enum_kind.name] = enum_kind
if is_nullable:
result_data_type = self.option_type_class(item_type=result_data_type)
result_data_type = self.option_type_class(
item_type=result_data_type
)
return result_data_type
@ -761,18 +780,26 @@ class TypeManager:
kinds.remove(typ)
elif string_klass in kinds_classes and integer_klass in kinds_classes:
int_klass = next(
(x for x in type_model.kinds if isinstance(x, model.ConstraintInteger))
x
for x in type_model.kinds
if isinstance(x, model.ConstraintInteger)
)
if (
# XX_size or XX_count is clearly an integer
(
enum_name
and (enum_name.endswith("size") or enum_name.endswith("count"))
and (
enum_name.endswith("size")
or enum_name.endswith("count")
)
)
# There is certain limit (min/max) - it can be only integer
or (
int_klass
and (int_klass.minimum is not None or int_klass.maximum is not None)
and (
int_klass.minimum is not None
or int_klass.maximum is not None
)
)
):
for typ in list(kinds):
@ -832,7 +859,8 @@ class TypeManager:
# Try adding parent_name as prefix
new_name = (
"".join(
x.title() for x in model_.reference.parent.name.split("_")
x.title()
for x in model_.reference.parent.name.split("_")
)
+ name
)
@ -851,7 +879,8 @@ class TypeManager:
# Try adding parent_name as prefix
new_other_name = (
"".join(
x.title() for x in other_model.parent.name.split("_")
x.title()
for x in other_model.parent.name.split("_")
)
+ name
)
@ -862,34 +891,37 @@ class TypeManager:
# with remote being oneOf with multiple structs)
# Try to make a name consisting of props
props = model_data_type.fields.keys()
new_new_name = name + "".join(x.title() for x in props).replace(
"_", ""
)
new_new_name = name + "".join(
x.title() for x in props
).replace("_", "")
if new_new_name not in unique_models:
for other_ref, other_model in self.refs.items():
other_name = getattr(other_model, "name", None)
if not other_name:
continue
if other_name in [
name,
new_name,
] and isinstance(other_model, Struct):
if other_name in [name, new_name] and isinstance(
other_model, Struct
):
# rename first occurence to the same scheme
props = other_model.fields.keys()
new_other_name = name + "".join(
x.title() for x in props
).replace("_", "")
other_model.name = new_other_name
unique_models[new_other_name] = model_.reference
unique_models[new_other_name] = (
model_.reference
)
model_data_type.name = new_new_name
unique_models[new_new_name] = model_.reference
else:
raise RuntimeError(
"Model name %s is already present" % new_new_name
f"Model name {new_new_name} is already present"
)
else:
raise RuntimeError("Model name %s is already present" % new_name)
raise RuntimeError(
f"Model name {new_name} is already present"
)
elif (
name
and name in unique_models
@ -908,9 +940,17 @@ class TypeManager:
def get_subtypes(self):
"""Get all subtypes excluding TLA"""
for k, v in self.refs.items():
if k and isinstance(v, (Enum, Struct, StringEnum)) and k.name != "Body":
if (
k
and isinstance(v, (Enum, Struct, StringEnum))
and k.name != "Body"
):
yield v
elif k and k.name != "Body" and isinstance(v, self.option_type_class):
elif (
k
and k.name != "Body"
and isinstance(v, self.option_type_class)
):
if isinstance(v.item_type, Enum):
yield v.item_type
@ -922,7 +962,10 @@ class TypeManager:
# There might be tuple Struct (with
# fields as list)
field_names = list(v.fields.keys())
if len(field_names) == 1 and v.fields[field_names[0]].is_optional:
if (
len(field_names) == 1
and v.fields[field_names[0]].is_optional
):
# A body with only field can not normally be optional
logging.warning(
"Request body with single root field cannot be optional"
@ -997,8 +1040,7 @@ class TypeManager:
yield (k, v)
def discard_model(
self,
type_model: model.PrimitiveType | model.ADT | model.Reference,
self, type_model: model.PrimitiveType | model.ADT | model.Reference
):
"""Discard model from the manager"""
logging.debug(f"Request to discard {type_model}")
@ -1010,7 +1052,9 @@ class TypeManager:
if ref == type_model.reference:
sub_ref: model.Reference | None = None
if ref.type == model.Struct:
logging.debug("Element is a struct. Purging also field types")
logging.debug(
"Element is a struct. Purging also field types"
)
# For struct type we cascadely discard all field types as
# well
for v in type_model.fields.values():
@ -1022,7 +1066,9 @@ class TypeManager:
logging.debug(f"Need to purge also {sub_ref}")
self.discard_model(sub_ref)
elif ref.type == model.OneOfType:
logging.debug("Element is a OneOf. Purging also kinds types")
logging.debug(
"Element is a OneOf. Purging also kinds types"
)
for v in type_model.kinds:
if isinstance(v, model.Reference):
sub_ref = v
@ -1038,7 +1084,9 @@ class TypeManager:
if isinstance(type_model.item_type, model.Reference):
sub_ref = type_model.item_type
else:
sub_ref = getattr(type_model.item_type, "reference", None)
sub_ref = getattr(
type_model.item_type, "reference", None
)
if sub_ref:
logging.debug(f"Need to purge also {sub_ref}")
self.discard_model(sub_ref)

View File

@ -35,7 +35,9 @@ class TypeSchema(BaseModel):
ref: Optional[str] = Field(alias="$ref", default=None)
oneOf: Optional[List[Any]] = Field(default=None)
anyOf: Optional[List[Any]] = Field(default=None)
openstack: Optional[Dict[str, Any]] = Field(alias="x-openstack", default=None)
openstack: Optional[Dict[str, Any]] = Field(
alias="x-openstack", default=None
)
required: Optional[List[str]] = None
pattern: Optional[str] = None
maxLength: Optional[int] = None
@ -103,7 +105,7 @@ class OperationSchema(BaseModel):
operationId: str | None = None
requestBody: dict = {}
responses: Dict[str, dict] = {}
tags: List[str] = list()
tags: List[str] = []
deprecated: bool | None = None
openstack: dict = Field(alias="x-openstack", default={})
security: List | None = None
@ -113,7 +115,9 @@ class HeaderSchema(BaseModel):
model_config = ConfigDict(extra="allow", populate_by_name=True)
description: Optional[str] = None
openstack: Optional[Dict[str, Any]] = Field(alias="x-openstack", default=None)
openstack: Optional[Dict[str, Any]] = Field(
alias="x-openstack", default=None
)
schema: Optional[TypeSchema] = Field(default=None)

View File

@ -29,7 +29,9 @@ class JsonSchemaGenerator(BaseGenerator):
properties = {}
for k, v in res.attrs.items():
field = v["attr"]
properties[field.name] = TypeSchema.from_sdk_field(field).model_dump(
properties[field.name] = TypeSchema.from_sdk_field(
field
).model_dump(
exclude_none=True, exclude_defaults=True, by_alias=True
)
if "docs" in v:
@ -54,7 +56,9 @@ class JsonSchemaGenerator(BaseGenerator):
"properties": properties,
}
}
schema = TypeSchema(type="object", properties=properties, description="")
schema = TypeSchema(
type="object", properties=properties, description=""
)
# if res.resource_class._store_unknown_attrs_as_properties:
# schema_attrs["additionalProperties"] = True
# schema_attrs["properties"] = properties

View File

@ -25,16 +25,14 @@ from codegenerator.types import OperationModel
from codegenerator.types import OperationTargetParams
from codegenerator.types import ResourceModel
OPERATION_ID_BLACKLIST: set[str] = set(
[
# # BlockStorage
# ## Host put has no schema
"project_id/os-hosts:put",
"os-hosts:put",
"project_id/os-hosts/id:put",
"os-hosts/id:put",
]
)
OPERATION_ID_BLACKLIST: set[str] = {
# # BlockStorage
# ## Host put has no schema
"project_id/os-hosts:put",
"os-hosts:put",
"project_id/os-hosts/id:put",
"os-hosts/id:put",
}
class MetadataGenerator(BaseGenerator):
@ -45,7 +43,7 @@ class MetadataGenerator(BaseGenerator):
if not path.exists():
return
yaml = YAML(typ="safe")
with open(path, "r") as fp:
with open(path) as fp:
spec = jsonref.replace_refs(yaml.load(fp))
return SpecSchema(**spec)
@ -63,12 +61,12 @@ class MetadataGenerator(BaseGenerator):
schema = self.load_openapi(spec_path)
openapi_spec = common.get_openapi_spec(spec_path)
metadata = Metadata(resources=dict())
metadata = Metadata(resources={})
api_ver = "v" + schema.info["version"].split(".")[0]
for path, spec in schema.paths.items():
path_elements: list[str] = path.split("/")
resource_name = "/".join(
[x for x in common.get_resource_names_from_url(path)]
list(common.get_resource_names_from_url(path))
)
if args.service_type == "object-store":
if path == "/v1/{account}":
@ -117,7 +115,7 @@ class MetadataGenerator(BaseGenerator):
ResourceModel(
api_version=api_ver,
spec_file=spec_path.as_posix(),
operations=dict(),
operations={},
),
)
for method in [
@ -139,7 +137,7 @@ class MetadataGenerator(BaseGenerator):
continue
op_model = OperationModel(
operation_id=operation.operationId, targets=dict()
operation_id=operation.operationId, targets={}
)
operation_key: str | None = None
@ -170,7 +168,10 @@ class MetadataGenerator(BaseGenerator):
operation_key = "create"
elif method == "delete":
operation_key = "delete"
elif path.endswith("/detail") and resource_name != "quota_set":
elif (
path.endswith("/detail")
and resource_name != "quota_set"
):
if method == "get":
operation_key = "list_detailed"
# elif path.endswith("/default"):
@ -335,7 +336,9 @@ class MetadataGenerator(BaseGenerator):
elif method == "delete":
operation_key = "delete"
if not operation_key:
logging.warn(f"Cannot identify op name for {path}:{method}")
logging.warn(
f"Cannot identify op name for {path}:{method}"
)
# Next hacks
if args.service_type == "identity" and resource_name in [
@ -415,28 +418,31 @@ class MetadataGenerator(BaseGenerator):
body_schema = operation.requestBody["content"][
"application/json"
]["schema"]
bodies = body_schema.get("oneOf", [body_schema])
bodies = body_schema.get(
"oneOf", [body_schema]
)
if len(bodies) > 1:
discriminator = body_schema.get(
"x-openstack", {}
).get("discriminator")
if discriminator != "action":
raise RuntimeError(
"Cannot generate metadata for %s since request body is not having action discriminator"
% path
f"Cannot generate metadata for {path} since request body is not having action discriminator"
)
for body in bodies:
action_name = body.get("x-openstack", {}).get(
"action-name"
)
action_name = body.get(
"x-openstack", {}
).get("action-name")
if not action_name:
action_name = list(body["properties"].keys())[0]
action_name = list(
body["properties"].keys()
)[0]
# Hardcode fixes
if resource_name == "flavor" and action_name in [
"update",
"create",
"delete",
]:
if (
resource_name == "flavor"
and action_name
in ["update", "create", "delete"]
):
# Flavor update/create/delete
# operations are exposed ALSO as wsgi
# actions. This is wrong and useless.
@ -453,26 +459,38 @@ class MetadataGenerator(BaseGenerator):
common.SPLIT_NAME_RE, action_name
)
).lower()
rust_sdk_params = get_rust_sdk_operation_args(
"action",
operation_name=action_name,
module_name=get_module_name(action_name),
rust_sdk_params = (
get_rust_sdk_operation_args(
"action",
operation_name=action_name,
module_name=get_module_name(
action_name
),
)
)
rust_cli_params = get_rust_cli_operation_args(
"action",
operation_name=action_name,
module_name=get_module_name(action_name),
resource_name=resource_name,
rust_cli_params = (
get_rust_cli_operation_args(
"action",
operation_name=action_name,
module_name=get_module_name(
action_name
),
resource_name=resource_name,
)
)
op_model = OperationModel(
operation_id=operation.operationId,
targets=dict(),
targets={},
)
op_model.operation_type = "action"
op_model.targets["rust-sdk"] = rust_sdk_params
op_model.targets["rust-cli"] = rust_cli_params
op_model.targets["rust-sdk"] = (
rust_sdk_params
)
op_model.targets["rust-cli"] = (
rust_cli_params
)
op_model = post_process_operation(
args.service_type,
@ -481,17 +499,25 @@ class MetadataGenerator(BaseGenerator):
op_model,
)
resource_model.operations[operation_name] = op_model
resource_model.operations[
operation_name
] = op_model
except KeyError:
raise RuntimeError("Cannot get bodies for %s" % path)
raise RuntimeError(
f"Cannot get bodies for {path}"
)
else:
if not operation_key:
raise NotImplementedError
operation_type = get_operation_type_by_key(operation_key)
operation_type = get_operation_type_by_key(
operation_key
)
op_model.operation_type = operation_type
# NOTE: sdk gets operation_key and not operation_type
rust_sdk_params = get_rust_sdk_operation_args(operation_key)
rust_sdk_params = get_rust_sdk_operation_args(
operation_key
)
rust_cli_params = get_rust_cli_operation_args(
operation_key, resource_name=resource_name
)
@ -523,16 +549,16 @@ class MetadataGenerator(BaseGenerator):
list_op.targets.pop("rust-cli")
# Prepare `find` operation data
if (list_op or list_detailed_op) and res_data.operations.get("show"):
if (list_op or list_detailed_op) and res_data.operations.get(
"show"
):
show_op = res_data.operations["show"]
(path, _, spec) = common.find_openapi_operation(
openapi_spec, show_op.operation_id
)
mod_path = common.get_rust_sdk_mod_path(
args.service_type,
res_data.api_version or "",
path,
args.service_type, res_data.api_version or "", path
)
response_schema = None
for code, rspec in spec.get("responses", {}).items():
@ -541,10 +567,7 @@ class MetadataGenerator(BaseGenerator):
content = rspec.get("content", {})
if "application/json" in content:
try:
(
response_schema,
_,
) = common.find_resource_schema(
(response_schema, _) = common.find_resource_schema(
content["application/json"].get("schema", {}),
None,
)
@ -582,7 +605,8 @@ class MetadataGenerator(BaseGenerator):
name_field = fqan.split(".")[-1]
name_filter_supported: bool = False
if name_field in [
x.get("name") for x in list(list_spec.get("parameters", []))
x.get("name")
for x in list(list_spec.get("parameters", []))
]:
name_filter_supported = True
@ -766,7 +790,9 @@ def post_process_operation(
return operation
def post_process_compute_operation(resource_name: str, operation_name: str, operation):
def post_process_compute_operation(
resource_name: str, operation_name: str, operation
):
if resource_name == "aggregate":
if operation_name in ["set-metadata", "add-host", "remove-host"]:
operation.targets["rust-sdk"].response_key = "aggregate"
@ -782,7 +808,9 @@ def post_process_compute_operation(resource_name: str, operation_name: str, oper
operation.targets["rust-cli"].sdk_mod_name = "list"
operation.targets["rust-cli"].operation_name = "list"
operation.targets["rust-sdk"].response_key = "availabilityZoneInfo"
operation.targets["rust-cli"].cli_full_command = "availability-zone list"
operation.targets[
"rust-cli"
].cli_full_command = "availability-zone list"
elif operation_name == "list_detailed":
operation.operation_type = "list"
operation.targets["rust-sdk"].operation_name = "list_detail"
@ -877,7 +905,9 @@ def post_process_compute_operation(resource_name: str, operation_name: str, oper
return operation
def post_process_identity_operation(resource_name: str, operation_name: str, operation):
def post_process_identity_operation(
resource_name: str, operation_name: str, operation
):
if resource_name == "role/imply":
if operation_name == "list":
operation.targets["rust-cli"].response_key = "role_inference"
@ -934,7 +964,9 @@ def post_process_identity_operation(resource_name: str, operation_name: str, ope
return operation
def post_process_image_operation(resource_name: str, operation_name: str, operation):
def post_process_image_operation(
resource_name: str, operation_name: str, operation
):
if resource_name.startswith("schema"):
# Image schemas are a JSON operation
operation.targets["rust-cli"].operation_type = "json"
@ -944,16 +976,25 @@ def post_process_image_operation(resource_name: str, operation_name: str, operat
elif resource_name == "metadef/namespace" and operation_name != "list":
operation.targets["rust-sdk"].response_key = "null"
operation.targets["rust-cli"].response_key = "null"
elif resource_name == "metadef/namespace/property" and operation_name == "list":
elif (
resource_name == "metadef/namespace/property"
and operation_name == "list"
):
operation.targets["rust-cli"].operation_type = "list_from_struct"
operation.targets["rust-cli"].response_key = "properties"
operation.targets["rust-sdk"].response_key = "properties"
elif resource_name == "metadef/namespace/resource_type":
operation.targets["rust-cli"].response_key = "resource_type_associations"
operation.targets["rust-sdk"].response_key = "resource_type_associations"
operation.targets[
"rust-cli"
].response_key = "resource_type_associations"
operation.targets[
"rust-sdk"
].response_key = "resource_type_associations"
operation.targets["rust-cli"].cli_full_command = operation.targets[
"rust-cli"
].cli_full_command.replace("resource-type", "resource-type-association")
].cli_full_command.replace(
"resource-type", "resource-type-association"
)
elif resource_name == "image":
if operation_name == "patch":
operation.targets["rust-cli"].cli_full_command = operation.targets[
@ -1010,7 +1051,9 @@ def post_process_block_storage_operation(
if "update-snapshot-status" in operation_name:
operation.targets["rust-cli"].cli_full_command = operation.targets[
"rust-cli"
].cli_full_command.replace("update-snapshot-status", "update-status")
].cli_full_command.replace(
"update-snapshot-status", "update-status"
)
if resource_name in ["os_volume_transfer", "volume_transfer"]:
if operation_name in ["list", "list_detailed"]:
@ -1030,7 +1073,9 @@ def post_process_block_storage_operation(
operation.targets["rust-cli"].sdk_mod_name = "list"
operation.targets["rust-cli"].operation_name = "list"
operation.targets["rust-sdk"].response_key = "availabilityZoneInfo"
operation.targets["rust-cli"].cli_full_command = "availability-zone list"
operation.targets[
"rust-cli"
].cli_full_command = "availability-zone list"
if resource_name == "qos_spec/association":
operation.operation_type = "list"
operation.targets["rust-sdk"].operation_name = "list"
@ -1040,7 +1085,9 @@ def post_process_block_storage_operation(
operation.targets["rust-cli"].sdk_mod_name = "list"
operation.targets["rust-sdk"].response_key = "qos_associations"
operation.targets["rust-cli"].response_key = "qos_associations"
operation.targets["rust-cli"].cli_full_command = "qos-spec association list"
operation.targets[
"rust-cli"
].cli_full_command = "qos-spec association list"
if resource_name == "limit" and operation_name == "list":
# Limits API return object and not a list
@ -1054,7 +1101,9 @@ def post_process_block_storage_operation(
return operation
def post_process_network_operation(resource_name: str, operation_name: str, operation):
def post_process_network_operation(
resource_name: str, operation_name: str, operation
):
if resource_name.startswith("floatingip"):
operation.targets["rust-cli"].cli_full_command = operation.targets[
"rust-cli"

View File

@ -42,7 +42,7 @@ class Reference(BaseModel):
#: Name of the object that uses the type under reference
name: str
type: Type | None = None
type: Any = None
hash_: str | None = None
parent: Reference | None = None
@ -131,7 +131,7 @@ class OneOfType(ADT):
class Enum(AbstractCollection):
"""Enum: a unique collection of primitives"""
base_types: list[Type[PrimitiveType]] = []
base_types: list[type[PrimitiveType]] = []
literals: set[Any] = set()
@ -182,10 +182,12 @@ class JsonSchemaParser:
def parse(
self, schema, ignore_read_only: bool = False
) -> ty.Tuple[ADT | None, list[ADT]]:
) -> tuple[ADT | None, list[ADT]]:
"""Parse JsonSchema object into internal DataModel"""
results: list[ADT] = []
res = self.parse_schema(schema, results, ignore_read_only=ignore_read_only)
res = self.parse_schema(
schema, results, ignore_read_only=ignore_read_only
)
return (res, results)
def parse_schema(
@ -353,9 +355,7 @@ class JsonSchemaParser:
if ref:
field = StructField(data_type=ref)
else:
field = StructField(
data_type=data_type,
)
field = StructField(data_type=data_type)
field.description = v.get("description")
if k in required:
@ -407,7 +407,9 @@ class JsonSchemaParser:
if pattern_props and not additional_properties_type:
# `"type": "object", "pattern_properties": ...`
if len(list(pattern_props.values())) == 1:
obj = Dictionary(value_type=list(pattern_props.values())[0])
obj = Dictionary(
value_type=list(pattern_props.values())[0]
)
else:
obj = Struct(pattern_properties=pattern_props)
elif not pattern_props and additional_properties_type:
@ -448,9 +450,15 @@ class JsonSchemaParser:
if obj:
obj.description = schema.get("description")
if obj.reference and f"{obj.reference.name}{obj.reference.type}" in [
f"{x.reference.name}{x.reference.type}" for x in results if x.reference
]:
if (
obj.reference
and f"{obj.reference.name}{obj.reference.type}"
in [
f"{x.reference.name}{x.reference.type}"
for x in results
if x.reference
]
):
if obj.reference.__hash__() in [
x.reference.__hash__() for x in results if x.reference
]:
@ -466,9 +474,9 @@ class JsonSchemaParser:
if parent and name:
new_name = parent.name + "_" + name
if Reference(name=new_name, type=obj.reference.type) in [
x.reference for x in results
]:
if Reference(
name=new_name, type=obj.reference.type
) in [x.reference for x in results]:
raise NotImplementedError
else:
obj.reference.name = new_name
@ -523,10 +531,7 @@ class JsonSchemaParser:
# Bad schema with type being a list of 1 entry
schema["type"] = schema["type"][0]
obj = self.parse_schema(
schema,
results,
name=name,
ignore_read_only=ignore_read_only,
schema, results, name=name, ignore_read_only=ignore_read_only
)
return obj
@ -598,7 +603,7 @@ class JsonSchemaParser:
# todo: decide whether some constraints can be under items
literals = schema.get("enum")
obj = Enum(literals=literals, base_types=[])
literal_types = set([type(x) for x in literals])
literal_types = {type(x) for x in literals}
for literal_type in literal_types:
if literal_type is str:
obj.base_types.append(ConstraintString)
@ -699,24 +704,24 @@ class OpenAPISchemaParser(JsonSchemaParser):
dt = Set(item_type=ConstraintString())
else:
raise NotImplementedError(
"Parameter serialization %s not supported" % schema
f"Parameter serialization {schema} not supported"
)
elif isinstance(param_typ, list):
# Param type can be anything. Process supported combinations first
if param_location == "query" and param_name == "limit":
dt = ConstraintInteger(minimum=0)
elif param_location == "query" and sorted(["string", "boolean"]) == sorted(
param_typ
):
elif param_location == "query" and sorted(
["string", "boolean"]
) == sorted(param_typ):
dt = PrimitiveBoolean()
elif param_location == "query" and sorted(["string", "integer"]) == sorted(
param_typ
):
elif param_location == "query" and sorted(
["string", "integer"]
) == sorted(param_typ):
dt = ConstraintInteger(**param_schema)
elif param_location == "query" and sorted(["string", "number"]) == sorted(
param_typ
):
elif param_location == "query" and sorted(
["string", "number"]
) == sorted(param_typ):
dt = ConstraintNumber(**param_schema)
if isinstance(dt, ADT):
@ -728,7 +733,9 @@ class OpenAPISchemaParser(JsonSchemaParser):
is_flag: bool = False
os_ext = schema.get("x-openstack", {})
if not isinstance(os_ext, dict):
raise RuntimeError(f"x-openstack must be a dictionary inside {schema}")
raise RuntimeError(
f"x-openstack must be a dictionary inside {schema}"
)
if "is-flag" in os_ext:
is_flag = os_ext["is-flag"]
@ -742,6 +749,6 @@ class OpenAPISchemaParser(JsonSchemaParser):
is_flag=is_flag,
resource_link=os_ext.get("resource_link", None),
)
raise NotImplementedError("Parameter %s is not covered yet" % schema)
raise NotImplementedError(f"Parameter {schema} is not covered yet")
raise RuntimeError("Parameter %s is not supported yet" % schema)
raise RuntimeError(f"Parameter {schema} is not supported yet")

View File

@ -111,7 +111,7 @@ class OpenStackServerSourceBase:
return
yaml = YAML(typ="safe")
yaml.preserve_quotes = True
with open(path, "r") as fp:
with open(path) as fp:
spec = yaml.load(fp)
return SpecSchema(**spec)
@ -150,7 +150,9 @@ class OpenStackServerSourceBase:
if os_ext == {}:
v.openstack = None
def _process_route(self, route, openapi_spec, ver_prefix=None, framework=None):
def _process_route(
self, route, openapi_spec, ver_prefix=None, framework=None
):
# Placement exposes "action" as controller in route defaults, all others - "controller"
if not ("controller" in route.defaults or "action" in route.defaults):
return
@ -174,11 +176,17 @@ class OpenStackServerSourceBase:
# if "method" not in route.conditions:
# raise RuntimeError("Method not set for %s", route)
method = route.conditions.get("method", "GET")[0] if route.conditions else "GET"
method = (
route.conditions.get("method", "GET")[0]
if route.conditions
else "GET"
)
controller = route.defaults.get("controller")
action = route.defaults.get("action")
logging.info("Path: %s; method: %s; operation: %s", path, method, action)
logging.info(
"Path: %s; method: %s; operation: %s", path, method, action
)
versioned_methods = {}
controller_actions = {}
@ -208,7 +216,7 @@ class OpenStackServerSourceBase:
contr = action
action = None
else:
raise RuntimeError("Unsupported controller %s" % controller)
raise RuntimeError(f"Unsupported controller {controller}")
# logging.debug("Actions: %s, Versioned methods: %s", actions, versioned_methods)
# path_spec = openapi_spec.paths.setdefault(path, PathSchema())
@ -231,7 +239,9 @@ class OpenStackServerSourceBase:
for path_element in path_elements:
if "{" in path_element:
param_name = path_element.strip("{}")
global_param_name = "_".join(path_resource_names) + f"_{param_name}"
global_param_name = (
"_".join(path_resource_names) + f"_{param_name}"
)
param_ref_name = self._get_param_ref(
openapi_spec,
@ -241,7 +251,7 @@ class OpenStackServerSourceBase:
path=path,
)
# Ensure reference to the param is in the path_params
if param_ref_name not in [k.ref for k in [p for p in path_params]]:
if param_ref_name not in [k.ref for k in list(path_params)]:
path_params.append(ParameterSchema(ref=param_ref_name))
# Cleanup path_resource_names
# if len(path_resource_names) > 0 and VERSION_RE.match(path_resource_names[0]):
@ -263,7 +273,8 @@ class OpenStackServerSourceBase:
operation_id = re.sub(
r"^(/?v[0-9.]*/)",
"",
"/".join([x.strip("{}") for x in path_elements]) + f":{method.lower()}", # noqa
"/".join([x.strip("{}") for x in path_elements])
+ f":{method.lower()}", # noqa
)
if action in versioned_methods:
@ -363,7 +374,9 @@ class OpenStackServerSourceBase:
for action, op_name in controller_actions.items():
logging.info("Action %s: %s", action, op_name)
(start_version, end_version) = (None, None)
action_impls: list[tuple[Callable, str | None, str | None]] = []
action_impls: list[
tuple[Callable, str | None, str | None]
] = []
if isinstance(op_name, str):
# wsgi action value is a string
if op_name in versioned_methods:
@ -376,7 +389,9 @@ class OpenStackServerSourceBase:
ver_method.end_version,
)
)
logging.info("Versioned action %s", ver_method.func)
logging.info(
"Versioned action %s", ver_method.func
)
elif hasattr(contr, op_name):
# ACTION with no version bounds
func = getattr(contr, op_name)
@ -413,7 +428,9 @@ class OpenStackServerSourceBase:
ver_method.end_version,
)
)
logging.info("Versioned action %s", ver_method.func)
logging.info(
"Versioned action %s", ver_method.func
)
elif slf and key:
vm = getattr(slf, "versioned_methods", None)
if vm and key in vm:
@ -431,7 +448,9 @@ class OpenStackServerSourceBase:
ver_method.end_version,
)
)
logging.info("Versioned action %s", ver_method.func)
logging.info(
"Versioned action %s", ver_method.func
)
else:
action_impls.append((op_name, None, None))
@ -534,10 +553,7 @@ class OpenStackServerSourceBase:
path: str | None = None,
):
logging.info(
"%s: %s [%s]",
(mode or "operation").title(),
operation_name,
func,
"%s: %s [%s]", (mode or "operation").title(), operation_name, func
)
# New decorators start having explicit null ApiVersion instead of being null
if (
@ -574,7 +590,9 @@ class OpenStackServerSourceBase:
if operation_spec.description:
# Reading spec from yaml file it was converted back to regular
# string. Therefore need to force it back to Literal block.
operation_spec.description = LiteralScalarString(operation_spec.description)
operation_spec.description = LiteralScalarString(
operation_spec.description
)
action_name = None
query_params_versions = []
@ -591,13 +609,22 @@ class OpenStackServerSourceBase:
if not (
"min-ver" in operation_spec.openstack
and tuple(
[int(x) for x in operation_spec.openstack["min-ver"].split(".")]
[
int(x)
for x in operation_spec.openstack["min-ver"].split(".")
]
)
< (self._api_ver(start_version))
):
operation_spec.openstack["min-ver"] = start_version.get_string()
operation_spec.openstack["min-ver"] = (
start_version.get_string()
)
if mode != "action" and end_version and self._api_ver_major(end_version):
if (
mode != "action"
and end_version
and self._api_ver_major(end_version)
):
if self._api_ver_major(end_version) == 0:
operation_spec.openstack.pop("max-ver", None)
operation_spec.deprecated = None
@ -609,11 +636,18 @@ class OpenStackServerSourceBase:
if not (
"max-ver" in operation_spec.openstack
and tuple(
[int(x) for x in operation_spec.openstack["max-ver"].split(".")]
[
int(x)
for x in operation_spec.openstack["max-ver"].split(
"."
)
]
)
> self._api_ver(end_version)
):
operation_spec.openstack["max-ver"] = end_version.get_string()
operation_spec.openstack["max-ver"] = (
end_version.get_string()
)
action_name = getattr(func, "wsgi_action", None)
if action_name:
@ -669,7 +703,9 @@ class OpenStackServerSourceBase:
if query_params_versions:
so = sorted(
query_params_versions,
key=lambda d: (tuple(map(int, d[1].split("."))) if d[1] else (0, 0)),
key=lambda d: (
tuple(map(int, d[1].split("."))) if d[1] else (0, 0)
),
)
for data, min_ver, max_ver in so:
self.process_query_parameters(
@ -695,7 +731,7 @@ class OpenStackServerSourceBase:
response_body_schema = ser_schema
responses_spec = operation_spec.responses
for error in expected_errors:
responses_spec.setdefault(str(error), dict(description="Error"))
responses_spec.setdefault(str(error), {"description": "Error"})
if mode != "action" and str(error) == "410":
# This looks like a deprecated operation still hanging out there
@ -721,7 +757,7 @@ class OpenStackServerSourceBase:
if response_codes:
for response_code in response_codes:
rsp = responses_spec.setdefault(
str(response_code), dict(description="Ok")
str(response_code), {"description": "Ok"}
)
if str(response_code) != "204" and method != "DELETE":
# Arrange response placeholder
@ -729,7 +765,9 @@ class OpenStackServerSourceBase:
"".join([x.title() for x in path_resource_names])
+ (
operation_name.replace("index", "list").title()
if not path_resource_names[-1].endswith(operation_name)
if not path_resource_names[-1].endswith(
operation_name
)
else ""
)
+ "Response"
@ -762,20 +800,24 @@ class OpenStackServerSourceBase:
curr_oneOf = curr_schema.oneOf
curr_ref = curr_schema.ref
if curr_oneOf:
if schema_ref not in [x["$ref"] for x in curr_oneOf]:
if schema_ref not in [
x["$ref"] for x in curr_oneOf
]:
curr_oneOf.append({"$ref": schema_ref})
elif curr_ref and curr_ref != schema_ref:
rsp["content"]["application/json"]["schema"] = (
TypeSchema(
oneOf=[
{"$ref": curr_ref},
{"$ref": schema_ref},
]
)
rsp["content"]["application/json"][
"schema"
] = TypeSchema(
oneOf=[
{"$ref": curr_ref},
{"$ref": schema_ref},
]
)
else:
rsp["content"] = {
"application/json": {"schema": {"$ref": schema_ref}}
"application/json": {
"schema": {"$ref": schema_ref}
}
}
# Ensure operation tags are existing
@ -783,7 +825,9 @@ class OpenStackServerSourceBase:
if tag not in [x["name"] for x in openapi_spec.tags]:
openapi_spec.tags.append({"name": tag})
self._post_process_operation_hook(openapi_spec, operation_spec, path=path)
self._post_process_operation_hook(
openapi_spec, operation_spec, path=path
)
def _post_process_operation_hook(
self, openapi_spec, operation_spec, path: str | None = None
@ -818,7 +862,9 @@ class OpenStackServerSourceBase:
# Nova added empty params since it was never validating them. Skip
param_attrs["schema"] = TypeSchema(type="string")
elif spec["type"] == "array":
param_attrs["schema"] = TypeSchema(**copy.deepcopy(spec["items"]))
param_attrs["schema"] = TypeSchema(
**copy.deepcopy(spec["items"])
)
else:
param_attrs["schema"] = TypeSchema(**copy.deepcopy(spec))
param_attrs["description"] = spec.get("description")
@ -837,10 +883,14 @@ class OpenStackServerSourceBase:
**param_attrs,
)
if ref_name not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref_name))
operation_spec.parameters.append(
ParameterSchema(ref=ref_name)
)
else:
raise RuntimeError("Query parameters %s is not an object as expected" % obj)
raise RuntimeError(
f"Query parameters {obj} is not an object as expected"
)
def process_body_parameters(
self,
@ -863,7 +913,9 @@ class OpenStackServerSourceBase:
if action_name:
path_resource_names.append(action_name)
cont_schema_name = "".join([x.title() for x in path_resource_names]) + "Request"
cont_schema_name = (
"".join([x.title() for x in path_resource_names]) + "Request"
)
cont_schema = None
if body_schemas is not UNSET and len(body_schemas) == 1:
@ -872,7 +924,9 @@ class OpenStackServerSourceBase:
if True: # body_schemas[0] is not UNSET:
if cont_schema_name in openapi_spec.components.schemas:
# if we have already oneOf - add there
cont_schema = openapi_spec.components.schemas[cont_schema_name]
cont_schema = openapi_spec.components.schemas[
cont_schema_name
]
if cont_schema.oneOf and body_schemas[0] not in [
x["$ref"] for x in cont_schema.oneOf
]:
@ -892,7 +946,9 @@ class OpenStackServerSourceBase:
)
cont_schema = openapi_spec.components.schemas.setdefault(
cont_schema_name,
TypeSchema(oneOf=[], openstack={"discriminator": "microversion"}),
TypeSchema(
oneOf=[], openstack={"discriminator": "microversion"}
),
)
# Add new refs to the container oneOf if they are not already
# there
@ -949,7 +1005,9 @@ class OpenStackServerSourceBase:
js_content = op_body.setdefault(mime_type, {})
body_schema = js_content.setdefault("schema", {})
one_of = body_schema.setdefault("oneOf", [])
if schema_ref and schema_ref not in [x.get("$ref") for x in one_of]:
if schema_ref and schema_ref not in [
x.get("$ref") for x in one_of
]:
one_of.append({"$ref": schema_ref})
os_ext = body_schema.setdefault("x-openstack", {})
os_ext["discriminator"] = "action"
@ -959,11 +1017,13 @@ class OpenStackServerSourceBase:
op_body = operation_spec.requestBody.setdefault("content", {})
js_content = op_body.setdefault(mime_type, {})
body_schema = js_content.setdefault("schema", {})
operation_spec.requestBody["content"][mime_type]["schema"] = TypeSchema(
ref=schema_ref
operation_spec.requestBody["content"][mime_type]["schema"] = (
TypeSchema(ref=schema_ref)
)
def _sanitize_schema(self, schema, *, start_version=None, end_version=None):
def _sanitize_schema(
self, schema, *, start_version=None, end_version=None
):
"""Various schemas are broken in various ways"""
if isinstance(schema, dict):
@ -987,7 +1047,11 @@ class OpenStackServerSourceBase:
if typ == "array" and "additionalItems" in v:
# additionalItems have nothing to do under the type array (create servergroup)
schema.properties[k].pop("additionalItems")
if typ == "array" and "items" in v and isinstance(v["items"], list):
if (
typ == "array"
and "items" in v
and isinstance(v["items"], list)
):
# server_group create - type array "items" is a dict and not list
# NOTE: server_groups recently changed to "prefixItems",
# so this may be not necessary anymore
@ -1023,7 +1087,9 @@ class OpenStackServerSourceBase:
else:
os_ext = None
# Ensure global parameter is present
param = ParameterSchema(location=param_location, name=param_name, **param_attrs)
param = ParameterSchema(
location=param_location, name=param_name, **param_attrs
)
if param_location == "path":
param.required = True
if not param.description and path:
@ -1041,7 +1107,7 @@ class OpenStackServerSourceBase:
# Param is already present. Check whether we need to modify min_ver
min_ver = os_ext.get("min-ver")
max_ver = os_ext.get("max-ver")
param.openstack = dict()
param.openstack = {}
if not old_param.openstack:
old_param.openstack = {}
old_min_ver = old_param.openstack.get("min-ver")
@ -1054,7 +1120,8 @@ class OpenStackServerSourceBase:
if (
old_max_ver
and max_ver
and tuple(old_max_ver.split(".")) > tuple(max_ver.split("."))
and tuple(old_max_ver.split("."))
> tuple(max_ver.split("."))
):
# Existing param has max_ver higher then what we have now. Keep old value
os_ext["max_ver"] = old_max_ver
@ -1074,7 +1141,9 @@ class OpenStackServerSourceBase:
action_name=None,
) -> tuple[str | None, str | None]:
if schema_def is UNSET:
logging.warn("No Schema definition for %s[%s] is known", name, action_name)
logging.warn(
"No Schema definition for %s[%s] is known", name, action_name
)
# Create dummy schema since we got no data for it
schema_def = {
"type": "object",
@ -1082,10 +1151,7 @@ class OpenStackServerSourceBase:
}
if schema_def is not None:
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(
**schema_def,
),
name, TypeSchema(**schema_def)
)
if action_name:
@ -1156,7 +1222,9 @@ class OpenStackServerSourceBase:
if isinstance(expected_errors, list):
expected_errors = [
str(x)
for x in filter(lambda x: isinstance(x, int), expected_errors)
for x in filter(
lambda x: isinstance(x, int), expected_errors
)
]
elif isinstance(expected_errors, int):
expected_errors = [str(expected_errors)]
@ -1178,15 +1246,21 @@ class OpenStackServerSourceBase:
typ_name = (
"".join([x.title() for x in path_resource_names])
+ func.__name__.title()
+ (f"_{min_ver.replace('.', '')}" if min_ver else "")
+ (
f"_{min_ver.replace('.', '')}"
if min_ver
else ""
)
)
comp_schema = openapi_spec.components.schemas.setdefault(
typ_name,
self._sanitize_schema(
copy.deepcopy(obj),
start_version=start_version,
end_version=end_version,
),
comp_schema = (
openapi_spec.components.schemas.setdefault(
typ_name,
self._sanitize_schema(
copy.deepcopy(obj),
start_version=start_version,
end_version=end_version,
),
)
)
if min_ver:
@ -1290,13 +1364,15 @@ def _convert_wsme_to_jsonschema(body_spec):
elif basetype is int:
res = {"type": "integer"}
else:
raise RuntimeError("Unsupported basetype %s" % basetype)
raise RuntimeError(f"Unsupported basetype {basetype}")
res["enum"] = list(values)
# elif hasattr(body_spec, "__name__") and body_spec.__name__ == "bool":
elif wtypes.isdict(body_spec):
res = {
"type": "object",
"additionalProperties": _convert_wsme_to_jsonschema(body_spec.value_type),
"additionalProperties": _convert_wsme_to_jsonschema(
body_spec.value_type
),
}
elif wtypes.isusertype(body_spec):
basetype = body_spec.basetype
@ -1304,7 +1380,7 @@ def _convert_wsme_to_jsonschema(body_spec):
if basetype is str:
res = {"type": "string", "format": name}
else:
raise RuntimeError("Unsupported basetype %s" % basetype)
raise RuntimeError(f"Unsupported basetype {basetype}")
elif isinstance(body_spec, wtypes.wsproperty):
res = _convert_wsme_to_jsonschema(body_spec.datatype)
elif body_spec is bool:
@ -1312,9 +1388,12 @@ def _convert_wsme_to_jsonschema(body_spec):
res = {"type": "boolean"}
elif body_spec is float:
res = {"type": "number", "format": "float"}
elif isinstance(body_spec, wtypes.dt_types) or body_spec is datetime.datetime:
elif (
isinstance(body_spec, wtypes.dt_types)
or body_spec is datetime.datetime
):
res = {"type": "string", "format": "date-time"}
else:
raise RuntimeError("Unsupported object %s" % body_spec)
raise RuntimeError(f"Unsupported object {body_spec}")
return res

View File

@ -42,9 +42,7 @@ from codegenerator.openapi.utils import merge_api_ref_doc
class CinderV3Generator(OpenStackServerSourceBase):
URL_TAG_MAP = {
"/versions": "version",
}
URL_TAG_MAP = {"/versions": "version"}
RESOURCE_MODULES = [
attachment,
@ -122,24 +120,24 @@ class CinderV3Generator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(impl_path)
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Block Storage API",
description=LiteralScalarString(
info={
"title": "OpenStack Block Storage API",
"description": LiteralScalarString(
"Volume API provided by Cinder service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
),
}
},
tags=[
{"name": k, "description": LiteralScalarString(v)}
for (k, v) in common.OPENAPI_TAGS.items()
@ -148,7 +146,9 @@ class CinderV3Generator(OpenStackServerSourceBase):
# Set global parameters
for name, definition in volume.VOLUME_PARAMETERS.items():
openapi_spec.components.parameters[name] = ParameterSchema(**definition)
openapi_spec.components.parameters[name] = ParameterSchema(
**definition
)
for route in self.router.map.matchlist:
# if route.routepath.startswith("/{project"):
@ -156,7 +156,9 @@ class CinderV3Generator(OpenStackServerSourceBase):
if route.routepath.endswith(".:(format)"):
continue
if route.routepath.startswith("/extensions") or route.routepath.startswith(
if route.routepath.startswith(
"/extensions"
) or route.routepath.startswith(
"/{project_id:[0-9a-f\-]+}/extensions"
):
if route.defaults.get("action") != "index":

View File

@ -104,7 +104,9 @@ ATTACHMENTS_DETAIL_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -128,11 +130,7 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -180,7 +180,7 @@ BACKUPS_SCHEMA: dict[str, Any] = {
"type": "array",
"items": copy.deepcopy(BACKUP_SHORT_SCHEMA),
"description": "A list of backup objects.",
},
}
},
}
@ -192,7 +192,7 @@ BACKUPS_DETAIL_SCHEMA: dict[str, Any] = {
"type": "array",
"items": copy.deepcopy(BACKUP_SCHEMA),
"description": "A list of backup objects.",
},
}
},
}
@ -253,7 +253,9 @@ BACKUP_RECORD_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -263,18 +265,16 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"backups/detail:get",
]:
for key, val in BACKUP_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -284,7 +284,9 @@ def _get_schema_ref(
)
ref = f"#/components/schemas/{name}"
elif name == "BackupsListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**BACKUPS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**BACKUPS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"BackupsCreateResponse",
@ -297,14 +299,12 @@ def _get_schema_ref(
ref = f"#/components/schemas/{name}"
elif name == "BackupsImport_RecordResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**BACKUP_SHORT_CONTAINER_SCHEMA),
name, TypeSchema(**BACKUP_SHORT_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "BackupsRestoreResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**BACKUP_RESTORE_RESPONSE_SCHEMA),
name, TypeSchema(**BACKUP_RESTORE_RESPONSE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "BackupsExport_RecordResponse":

View File

@ -41,10 +41,7 @@ CLUSTER_SCHEMA: dict[str, Any] = {
"status": {
"type": "string",
"description": "The status of the cluster.",
"enum": [
"enabled",
"disabled",
],
"enum": ["enabled", "disabled"],
},
},
}
@ -101,10 +98,7 @@ CLUSTER_CONTAINER_SCHEMA: dict[str, Any] = {
CLUSTERS_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"clusters": {
"type": "array",
"items": copy.deepcopy(CLUSTER_SCHEMA),
}
"clusters": {"type": "array", "items": copy.deepcopy(CLUSTER_SCHEMA)}
},
}
@ -215,15 +209,16 @@ CLUSTERS_LIST_DETAIL_PARAMETERS: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
"project_id/clusters:get",
"clusters:get",
]:
if operationId in ["project_id/clusters:get", "clusters:get"]:
for key, val in CLUSTERS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -233,18 +228,16 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"clusters/detail:get",
]:
for key, val in CLUSTERS_LIST_DETAIL_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -254,17 +247,16 @@ def _get_schema_ref(
)
ref = f"#/components/schemas/{name}"
elif name == "ClustersListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**CLUSTERS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**CLUSTERS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "ClusterUpdateRequest":
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**CLUSTER_UPDATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"ClusterShowResponse",
"ClusterUpdateResponse",
]:
elif name in ["ClusterShowResponse", "ClusterUpdateResponse"]:
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**CLUSTER_CONTAINER_SCHEMA)
)

View File

@ -156,7 +156,7 @@ LINKS_SCHEMA: dict[str, Any] = {
METADATA_SCHEMA: dict[str, Any] = {
"type": "object",
"patternProperties": {
"^[a-zA-Z0-9-_:. /]{1,255}$": {"type": "string", "maxLength": 255},
"^[a-zA-Z0-9-_:. /]{1,255}$": {"type": "string", "maxLength": 255}
},
"additionalProperties": False,
"description": "A metadata object. Contains one or more metadata key and value pairs that are associated with the resource.",

View File

@ -45,12 +45,14 @@ EXTENSIONS_SCHEMA: dict[str, Any] = {
"extensions": {
"type": "array",
"items": copy.deepcopy(EXTENSION_SCHEMA),
},
}
},
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -65,11 +67,7 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -65,10 +65,7 @@ GROUP_SCHEMA: dict[str, Any] = {
"type": "object",
"description": "A volume group object.",
"properties": {
"name": {
"type": ["string", "null"],
"description": "The group name.",
},
"name": {"type": ["string", "null"], "description": "The group name."},
"id": {
"type": "string",
"format": "uuid",
@ -145,10 +142,7 @@ GROUPS_SCHEMA: dict[str, Any] = {
"type": "object",
"description": "A container with list of group objects.",
"properties": {
"groups": {
"type": "array",
"items": copy.deepcopy(GROUP_SCHEMA),
},
"groups": {"type": "array", "items": copy.deepcopy(GROUP_SCHEMA)}
},
}
@ -159,7 +153,7 @@ GROUPS_DETAIL_SCHEMA: dict[str, Any] = {
"groups": {
"type": "array",
"items": copy.deepcopy(GROUP_DETAIL_SCHEMA),
},
}
},
}
@ -207,7 +201,9 @@ GROUP_REPLICATION_TARGETS_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -217,18 +213,16 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"groups/detail:get",
]:
for key, val in GROUP_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -238,12 +232,11 @@ def _get_schema_ref(
)
ref = f"#/components/schemas/{name}"
elif name == "GroupsListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**GROUPS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**GROUPS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"GroupsCreateResponse",
"GroupShowResponse",
]:
elif name in ["GroupsCreateResponse", "GroupShowResponse"]:
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**GROUP_CONTAINER_SCHEMA)
)

View File

@ -126,7 +126,7 @@ GROUP_SNAPSHOTS_SCHEMA: dict[str, Any] = {
"group_snapshots": {
"type": "array",
"items": copy.deepcopy(GROUP_SNAPSHOT_SCHEMA),
},
}
},
}
@ -137,7 +137,7 @@ GROUP_SNAPSHOTS_DETAIL_SCHEMA: dict[str, Any] = {
"group_snapshots": {
"type": "array",
"items": copy.deepcopy(GROUP_SNAPSHOT_DETAIL_SCHEMA),
},
}
},
}
@ -147,7 +147,9 @@ GROUP_SNAPSHOT_CONTAINER_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -157,18 +159,16 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"group_snapshots/detail:get",
]:
for key, val in GROUP_SNAPSHOT_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -104,7 +104,7 @@ GROUP_TYPES_SCHEMA: dict[str, Any] = {
"group_types": {
"type": "array",
"items": copy.deepcopy(GROUP_TYPE_SCHEMA),
},
}
},
}
@ -114,7 +114,9 @@ GROUP_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -124,18 +126,16 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"group_types/detail:get",
]:
for key, val in GROUP_TYPE_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -109,10 +109,7 @@ HOSTS_SCHEMA: dict[str, Any] = {
"type": "object",
"description": "A container with list of host objects.",
"properties": {
"hosts": {
"type": "array",
"items": copy.deepcopy(HOST_SCHEMA),
},
"hosts": {"type": "array", "items": copy.deepcopy(HOST_SCHEMA)}
},
}
@ -120,27 +117,18 @@ HOSTS_DETAIL_SCHEMA: dict[str, Any] = {
"type": "object",
"description": "A container with list of host objects.",
"properties": {
"hosts": {
"type": "array",
"items": copy.deepcopy(HOST_DETAIL_SCHEMA),
},
"hosts": {"type": "array", "items": copy.deepcopy(HOST_DETAIL_SCHEMA)}
},
}
HOST_CONTAINER_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"host": {"type": "array", "items": HOST_DETAIL_SCHEMA},
},
"properties": {"host": {"type": "array", "items": HOST_DETAIL_SCHEMA}},
}
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -93,16 +93,14 @@ LIMITS_SCHEMA: dict[str, Any] = {
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
if name == "LimitsListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**LIMITS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**LIMITS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
else:
return (None, None, False)

View File

@ -88,13 +88,12 @@ MESSAGE_CONTAINER_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
"project_id/messages:get",
"messages:get",
]:
if operationId in ["project_id/messages:get", "messages:get"]:
for pname in [
"sort",
"sort_key",
@ -109,16 +108,14 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
if name == "MessagesListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**MESSAGES_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**MESSAGES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
if name == "MessageShowResponse":
openapi_spec.components.schemas.setdefault(

View File

@ -20,10 +20,7 @@ from codegenerator.common.schema import TypeSchema
QOS_SPEC_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"consumer": {
"type": "string",
"description": "The consumer type.",
},
"consumer": {"type": "string", "description": "The consumer type."},
"specs": {
"type": ["object", "null"],
"description": "A specs object.",
@ -120,16 +117,17 @@ QOS_SPEC_LIST_PARAMETERS: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
"project_id/qos-specs:get",
"qos-specs:get",
]:
if operationId in ["project_id/qos-specs:get", "qos-specs:get"]:
for key, val in QOS_SPEC_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -147,19 +145,14 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
if name == "Qos_SpecsListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**QOS_SPECS_SCHEMA),
name, TypeSchema(**QOS_SPECS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -168,16 +161,12 @@ def _get_schema_ref(
"Qos_SpecUpdateResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**QOS_SPEC_CONTAINER_SCHEMA),
name, TypeSchema(**QOS_SPEC_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"Qos_SpecsAssociationsResponse",
]:
elif name in ["Qos_SpecsAssociationsResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**QOS_SPEC_ASSOCIATIONS_SCHEMA),
name, TypeSchema(**QOS_SPEC_ASSOCIATIONS_SCHEMA)
)
ref = f"#/components/schemas/{name}"

View File

@ -26,9 +26,7 @@ RESOURCE_FILTERS_SCHEMA: dict[str, Any] = {
"filters": {
"type": "array",
"description": "The resource filter array",
"items": {
"type": "string",
},
"items": {"type": "string"},
},
"resource": {
"type": "string",
@ -42,11 +40,7 @@ RESOURCE_FILTERS_SCHEMA: dict[str, Any] = {
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -135,7 +135,7 @@ SNAPSHOTS_SCHEMA: dict[str, Any] = {
"type": "array",
"items": copy.deepcopy(SNAPSHOT_SCHEMA),
"description": "A list of volume objects.",
},
}
},
"required": ["snapshots"],
"additionalProperties": False,
@ -149,14 +149,16 @@ SNAPSHOTS_DETAIL_SCHEMA: dict[str, Any] = {
"type": "array",
"items": copy.deepcopy(SNAPSHOT_DETAIL_SCHEMA),
"description": "A list of snapshot objects.",
},
}
},
"required": ["snapshots"],
"additionalProperties": False,
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -182,17 +184,15 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# ### Snapshot
if name == "SnapshotsListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**SNAPSHOTS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**SNAPSHOTS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
if name == "SnapshotsDetailResponse":
openapi_spec.components.schemas.setdefault(

View File

@ -29,7 +29,7 @@ MANAGEABLE_SNAPSHOT_SCHEMA: dict[str, Any] = {
"source-name": {
"type": "string",
"description": "The resource's name.",
},
}
},
},
"safe_to_manage": {
@ -53,12 +53,7 @@ MANAGEABLE_SNAPSHOT_SCHEMA: dict[str, Any] = {
},
},
"additionalProperties": False,
"required": [
"source_reference",
"safe_to_manage",
"reference",
"size",
],
"required": ["source_reference", "safe_to_manage", "reference", "size"],
}
MANAGEABLE_SNAPSHOT_DETAIL_SCHEMA: dict[str, Any] = {
@ -108,7 +103,9 @@ MANAGEABLE_SNAPSHOT_CREATE_REQUEST_SCHEMA: dict[str, Any] = copy.deepcopy(
MANAGEABLE_SNAPSHOT_CREATE_REQUEST_SCHEMA["properties"].pop("type", None)
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -131,11 +128,7 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -112,10 +112,7 @@ VOLUME_SCHEMA: dict[str, Any] = {
"type": "boolean",
"description": "If true, this volume can attach to more than one instance.",
},
"status": {
"type": "string",
"description": "The volume status.",
},
"status": {"type": "string", "description": "The volume status."},
"migration_status": {
"type": "string",
"description": "The volume migration status. Admin only.",
@ -215,7 +212,7 @@ VOLUMES_SCHEMA: dict[str, Any] = {
"type": "array",
"items": copy.deepcopy(VOLUME_SHORT_SCHEMA),
"description": "A list of volume objects.",
},
}
},
}
@ -227,7 +224,7 @@ VOLUMES_DETAIL_SCHEMA: dict[str, Any] = {
"type": "array",
"items": copy.deepcopy(VOLUME_SCHEMA),
"description": "A list of volume objects.",
},
}
},
}
@ -235,34 +232,25 @@ VOLUME_PARAMETERS: dict[str, Any] = {
"all_tenants": {
"in": "query",
"name": "all_tenants",
"schema": {
"type": "boolean",
},
"schema": {"type": "boolean"},
"description": "Shows details for all project. Admin only.",
},
"sort": {
"in": "query",
"name": "sort",
"schema": {
"type": "string",
},
"schema": {"type": "string"},
"description": "Comma-separated list of sort keys and optional sort directions in the form of < key > [: < direction > ]. A valid direction is asc (ascending) or desc (descending).",
},
"sort_key": {
"in": "query",
"name": "sort_key",
"schema": {
"type": "string",
},
"schema": {"type": "string"},
"description": "Sorts by an attribute. A valid value is name, status, container_format, disk_format, size, id, created_at, or updated_at. Default is created_at. The API uses the natural sorting direction of the sort_key attribute value. Deprecated in favour of the combined sort parameter.",
},
"sort_dir": {
"in": "query",
"name": "sort_dir",
"schema": {
"type": "string",
"enum": ["asc", "desc"],
},
"schema": {"type": "string", "enum": ["asc", "desc"]},
"description": "Sorts by one or more sets of attribute and sort direction combinations. If you omit the sort direction in a set, default is desc. Deprecated in favour of the combined sort parameter.",
},
"limit": {
@ -274,55 +262,40 @@ VOLUME_PARAMETERS: dict[str, Any] = {
"offset": {
"in": "query",
"name": "offset",
"schema": {
"type": "integer",
},
"schema": {"type": "integer"},
"description": "Used in conjunction with limit to return a slice of items. offset is where to start in the list.",
},
"marker": {
"in": "query",
"name": "marker",
"schema": {
"type": "string",
"format": "uuid",
},
"schema": {"type": "string", "format": "uuid"},
"description": "The ID of the last-seen item. Use the limit parameter to make an initial limited request and use the ID of the last-seen item from the response as the marker parameter value in a subsequent limited request.",
},
"with_count": {
"in": "query",
"name": "with_count",
"schema": {
"type": "boolean",
},
"schema": {"type": "boolean"},
"description": "Whether to show count in API response or not, default is False.",
"x-openstack": {"min-ver": "3.45"},
},
"created_at": {
"in": "query",
"name": "created_at",
"schema": {
"type": "string",
"format": "date-time",
},
"schema": {"type": "string", "format": "date-time"},
"description": "Filters reuslts by a time that resources are created at with time comparison operators: gt/gte/eq/neq/lt/lte.",
"x-openstack": {"min-ver": "3.60"},
},
"updated_at": {
"in": "query",
"name": "updated_at",
"schema": {
"type": "string",
"format": "date-time",
},
"schema": {"type": "string", "format": "date-time"},
"description": "Filters reuslts by a time that resources are updated at with time comparison operators: gt/gte/eq/neq/lt/lte.",
"x-openstack": {"min-ver": "3.60"},
},
"consumes_quota": {
"in": "query",
"name": "consumes_quota",
"schema": {
"type": "boolean",
},
"schema": {"type": "boolean"},
"description": "Filters results by consumes_quota field. Resources that dont use quotas are usually temporary internal resources created to perform an operation. Default is to not filter by it. Filtering by this option may not be always possible in a cloud, see List Resource Filters to determine whether this filter is available in your cloud.",
"x-openstack": {"min-ver": "3.65"},
},
@ -388,7 +361,9 @@ VOLUME_UPLOAD_IMAGE_RESPONSE_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -413,29 +388,23 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
ref = f"#/components/parameters/{pname}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
elif operationId in [
"project_id/volumes/summary:get",
]:
for pname in [
"all_tenants",
]:
elif operationId in ["project_id/volumes/summary:get"]:
for pname in ["all_tenants"]:
ref = f"#/components/parameters/{pname}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# ### Volume
if name == "VolumesListResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**VOLUMES_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**VOLUMES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
if name == "VolumesDetailResponse":
openapi_spec.components.schemas.setdefault(
@ -506,8 +475,7 @@ def _get_schema_ref(
return (None, None, True)
elif name == "VolumesActionOs-Volume_Upload_ImageResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_UPLOAD_IMAGE_RESPONSE_SCHEMA),
name, TypeSchema(**VOLUME_UPLOAD_IMAGE_RESPONSE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
else:

View File

@ -87,7 +87,9 @@ MANAGEABLE_VOLUMES_DETAIL_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -110,11 +112,7 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str

View File

@ -98,7 +98,9 @@ VOLUME_TRANSFER_CONTAINER_SCHEMA: dict[str, Any] = {
VOLUME_TRANSFERS_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"transfers": {"type": "array", "items": VOLUME_TRANSFER_SCHEMA}},
"properties": {
"transfers": {"type": "array", "items": VOLUME_TRANSFER_SCHEMA}
},
}
OS_VOLUME_TRANSFERS_DETAIL_SCHEMA: dict[str, Any] = {
@ -170,7 +172,9 @@ VOLUME_TRANSFER_LIST_PARAMETERS: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
@ -179,7 +183,9 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"volume-transfers/detail:get",
]:
for key, val in VOLUME_TRANSFER_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -191,7 +197,9 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
# structure and just copy single param.
key = "transfer_all_tenants"
val = VOLUME_TRANSFER_LIST_PARAMETERS[key]
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -200,25 +208,19 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
if name == "Os_Volume_TransferListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TRANSFERS_SCHEMA),
name, TypeSchema(**VOLUME_TRANSFERS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_Volume_TransferDetailResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**OS_VOLUME_TRANSFERS_DETAIL_SCHEMA),
name, TypeSchema(**OS_VOLUME_TRANSFERS_DETAIL_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -227,21 +229,18 @@ def _get_schema_ref(
"Os_Volume_TransferShowResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**OS_VOLUME_TRANSFER_CONTAINER_SCHEMA),
name, TypeSchema(**OS_VOLUME_TRANSFER_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Volume_TransfersListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TRANSFERS_SCHEMA),
name, TypeSchema(**VOLUME_TRANSFERS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Volume_TransfersDetailResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TRANSFERS_DETAIL_SCHEMA),
name, TypeSchema(**VOLUME_TRANSFERS_DETAIL_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -250,8 +249,7 @@ def _get_schema_ref(
"Volume_TransferShowResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TRANSFER_CONTAINER_SCHEMA),
name, TypeSchema(**VOLUME_TRANSFER_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"

View File

@ -60,7 +60,9 @@ VOLUME_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
VOLUME_TYPES_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"volume_types": {"type": "array", "items": VOLUME_TYPE_SCHEMA}},
"properties": {
"volume_types": {"type": "array", "items": VOLUME_TYPE_SCHEMA}
},
}
VOLUME_TYPE_LIST_PARAMETERS: dict[str, Any] = {
@ -114,7 +116,7 @@ VOLUME_TYPE_EXTRA_SPECS_SCHEMA: dict[str, Any] = {
"extra_specs": {
"description": "A key and value pair that contains additional specifications that are associated with the volume type. Examples include capabilities, capacity, compression, and so on, depending on the storage driver in use.",
**parameter_types.extra_specs_with_no_spaces_key,
},
}
},
}
@ -204,7 +206,7 @@ VOLUME_TYPE_ENCRYPTION_SHOW_SCHEMA: dict[str, Any] = {
"cipher": {
"type": "string",
"description": "The encryption algorithm or mode. For example, aes-xts-plain64. The default value is None.",
},
}
},
}
@ -226,7 +228,9 @@ DEFAULT_TYPE_SCHEMA: dict[str, Any] = {
DEFAULT_TYPES_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"default_types": {"type": "array", "items": DEFAULT_TYPE_SCHEMA}},
"properties": {
"default_types": {"type": "array", "items": DEFAULT_TYPE_SCHEMA}
},
}
DEFAULT_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
@ -235,26 +239,24 @@ DEFAULT_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
"project_id/types:get",
]:
if operationId in ["project_id/types:get"]:
for key, val in VOLUME_TYPE_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -262,8 +264,7 @@ def _get_schema_ref(
# ### Volume Type
if name == "TypesListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPES_SCHEMA),
name, TypeSchema(**VOLUME_TYPES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -272,8 +273,7 @@ def _get_schema_ref(
"TypeUpdateResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_CONTAINER_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -281,8 +281,7 @@ def _get_schema_ref(
"TypesExtra_SpecsCreateResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_EXTRA_SPECS_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_EXTRA_SPECS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
@ -291,15 +290,13 @@ def _get_schema_ref(
"TypesExtra_SpecUpdateResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_EXTRA_SPEC_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_EXTRA_SPEC_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "TypesOs_Volume_Type_AccessListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_ACCESS_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_ACCESS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -312,14 +309,12 @@ def _get_schema_ref(
# this is not really a list operation, but who cares
elif name == "TypesEncryptionListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_ENCRYPTION_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_ENCRYPTION_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "TypesEncryptionShowResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_ENCRYPTION_SHOW_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_ENCRYPTION_SHOW_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -327,14 +322,12 @@ def _get_schema_ref(
"TypesEncryptionUpdateResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**VOLUME_TYPE_ENCRYPTION_CONTAINER_SCHEMA),
name, TypeSchema(**VOLUME_TYPE_ENCRYPTION_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Default_TypesListResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**DEFAULT_TYPES_SCHEMA),
name, TypeSchema(**DEFAULT_TYPES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
@ -343,8 +336,7 @@ def _get_schema_ref(
"Default_TypeDetailResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**DEFAULT_TYPE_CONTAINER_SCHEMA),
name, TypeSchema(**DEFAULT_TYPE_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"

View File

@ -91,7 +91,9 @@ IMAGE_PARAMETERS = {
"status": {
"in": "query",
"name": "status",
"description": LiteralScalarString("Filters the response by an image status."),
"description": LiteralScalarString(
"Filters the response by an image status."
),
"schema": {"type": "string"},
},
"size_min": {
@ -124,9 +126,7 @@ IMAGE_PARAMETERS = {
"description": LiteralScalarString(
'When true, filters the response to display only "hidden" images. By default, "hidden" images are not included in the image-list response. (Since Image API v2.7)'
),
"schema": {
"type": "boolean",
},
"schema": {"type": "boolean"},
"x-openstack": {"min-ver": "2.7"},
},
"sort_key": {
@ -287,29 +287,31 @@ class GlanceGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(impl_path)
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Image API",
description=LiteralScalarString(
info={
"title": "OpenStack Image API",
"description": LiteralScalarString(
"Image API provided by Glance service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
),
}
},
)
# Set global headers and parameters
for name, definition in IMAGE_PARAMETERS.items():
openapi_spec.components.parameters[name] = ParameterSchema(**definition)
openapi_spec.components.parameters[name] = ParameterSchema(
**definition
)
for name, definition in IMAGE_HEADERS.items():
openapi_spec.components.headers[name] = HeaderSchema(**definition)
@ -372,7 +374,9 @@ class GlanceGenerator(OpenStackServerSourceBase):
key = "OpenStack-image-store-ids"
ref = f"#/components/headers/{key}"
operation_spec.responses["201"].setdefault("headers", {})
operation_spec.responses["201"]["headers"].update({key: {"$ref": ref}})
operation_spec.responses["201"]["headers"].update(
{key: {"$ref": ref}}
)
elif operationId == "images/image_id/file:put":
for ref in [
@ -382,21 +386,17 @@ class GlanceGenerator(OpenStackServerSourceBase):
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
elif operationId == "images/image_id/file:get":
for ref in [
"#/components/parameters/range",
]:
for ref in ["#/components/parameters/range"]:
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
for code in ["200", "206"]:
operation_spec.responses[code].setdefault("headers", {})
for hdr in ["Content-Type", "Content-Md5", "Content-Length"]:
operation_spec.responses[code]["headers"].setdefault(
hdr,
{"$ref": f"#/components/headers/{hdr}"},
hdr, {"$ref": f"#/components/headers/{hdr}"}
)
operation_spec.responses["206"]["headers"].setdefault(
"Content-Range",
{"$ref": "#/components/headers/Content-Range"},
"Content-Range", {"$ref": "#/components/headers/Content-Range"}
)
def _get_schema_ref(
@ -433,7 +433,9 @@ class GlanceGenerator(OpenStackServerSourceBase):
"type": "array",
"items": {
"type": "object",
"properties": copy.deepcopy(schema_def.properties),
"properties": copy.deepcopy(
schema_def.properties
),
},
},
},
@ -451,7 +453,9 @@ class GlanceGenerator(OpenStackServerSourceBase):
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
glance_schema.CollectionSchema("tasks", tasks.get_task_schema()),
glance_schema.CollectionSchema(
"tasks", tasks.get_task_schema()
),
name,
),
)
@ -470,7 +474,9 @@ class GlanceGenerator(OpenStackServerSourceBase):
"uri": {"type": "string"},
"glance_image_id": {"type": "string"},
"glance_region": {"type": "string"},
"glance_service_interface": {"type": "string"},
"glance_service_interface": {
"type": "string"
},
},
},
"stores": {
@ -496,12 +502,12 @@ class GlanceGenerator(OpenStackServerSourceBase):
elif name == "ImagesMembersListResponse":
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(image_members.get_collection_schema(), name),
self._get_glance_schema(
image_members.get_collection_schema(), name
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"InfoImportGet_Image_ImportResponse",
]:
elif name in ["InfoImportGet_Image_ImportResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(
@ -524,9 +530,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"InfoStoresGet_StoresResponse",
]:
elif name in ["InfoStoresGet_StoresResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(
@ -549,9 +553,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"InfoStoresDetailGet_Stores_DetailResponse",
]:
elif name in ["InfoStoresDetailGet_Stores_DetailResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(
@ -580,9 +582,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"MetadefsNamespacesListResponse",
]:
elif name in ["MetadefsNamespacesListResponse"]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
@ -590,17 +590,15 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"MetadefsNamespacesObjectsListResponse",
]:
elif name in ["MetadefsNamespacesObjectsListResponse"]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(metadef_objects.get_collection_schema(), name),
self._get_glance_schema(
metadef_objects.get_collection_schema(), name
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"MetadefsNamespacesPropertiesListResponse",
]:
elif name in ["MetadefsNamespacesPropertiesListResponse"]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
@ -608,9 +606,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"MetadefsResource_TypesListResponse",
]:
elif name in ["MetadefsResource_TypesListResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(
@ -648,9 +644,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"MetadefsNamespacesResource_TypesShowResponse",
]:
elif name in ["MetadefsNamespacesResource_TypesShowResponse"]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
@ -658,12 +652,12 @@ class GlanceGenerator(OpenStackServerSourceBase):
),
)
ref = f"#/components/schemas/{name}"
elif name in [
"MetadefsNamespacesTagsListResponse",
]:
elif name in ["MetadefsNamespacesTagsListResponse"]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(metadef_tags.get_collection_schema(), name),
self._get_glance_schema(
metadef_tags.get_collection_schema(), name
),
)
ref = f"#/components/schemas/{name}"
elif name == "ImageUpdateRequest":
@ -674,26 +668,19 @@ class GlanceGenerator(OpenStackServerSourceBase):
# ),
# )
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**{"type": "string", "format": "RFC 6902"}),
name, TypeSchema(**{"type": "string", "format": "RFC 6902"})
)
mime_type = "application/openstack-images-v2.1-json-patch"
ref = f"#/components/schemas/{name}"
elif name in [
"ImagesFileUploadRequest",
]:
elif name in ["ImagesFileUploadRequest"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**{"type": "string", "format": "binary"}),
name, TypeSchema(**{"type": "string", "format": "binary"})
)
ref = f"#/components/schemas/{name}"
mime_type = "application/octet-stream"
elif name in [
"ImagesFileDownloadResponse",
]:
elif name in ["ImagesFileDownloadResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**{"type": "string", "format": "binary"}),
name, TypeSchema(**{"type": "string", "format": "binary"})
)
ref = f"#/components/schemas/{name}"
mime_type = "application/octet-stream"
@ -734,9 +721,9 @@ class GlanceGenerator(OpenStackServerSourceBase):
for field in i32_fixes:
res["properties"][field]["format"] = "int64"
elif name == "MetadefsNamespacesPropertiesListResponse":
res["properties"]["properties"]["additionalProperties"]["type"] = (
"object"
)
res["properties"]["properties"]["additionalProperties"][
"type"
] = "object"
return TypeSchema(**res)
@classmethod

View File

@ -100,24 +100,24 @@ class KeystoneGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(impl_path)
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Identity API",
description=LiteralScalarString(
info={
"title": "OpenStack Identity API",
"description": LiteralScalarString(
"Identity API provided by Keystone service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
headers={
"headers": {
"X-Auth-Token": {
"description": "A valid authentication token",
"schema": {"type": "string", "format": "secret"},
@ -131,7 +131,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
"schema": {"type": "string"},
},
},
parameters={
"parameters": {
"X-Auth-Token": {
"in": "header",
"name": "X-Auth-Token",
@ -146,7 +146,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
"required": True,
},
},
),
},
)
for route in self.router.iter_rules():
@ -158,7 +158,9 @@ class KeystoneGenerator(OpenStackServerSourceBase):
self._sanitize_param_ver_info(openapi_spec, self.min_api_version)
if args.api_ref_src:
merge_api_ref_doc(openapi_spec, args.api_ref_src, allow_strip_version=False)
merge_api_ref_doc(
openapi_spec, args.api_ref_src, allow_strip_version=False
)
self.dump_openapi(openapi_spec, impl_path, args.validate)
@ -205,10 +207,12 @@ class KeystoneGenerator(OpenStackServerSourceBase):
for path_element in path_elements:
if "{" in path_element:
param_name = path_element.strip("{}")
global_param_name = "_".join(path_resource_names) + f"_{param_name}"
global_param_name = (
"_".join(path_resource_names) + f"_{param_name}"
)
param_ref_name = f"#/components/parameters/{global_param_name}"
# Ensure reference to the param is in the path_params
if param_ref_name not in [k.ref for k in [p for p in path_params]]:
if param_ref_name not in [k.ref for k in list(path_params)]:
path_params.append(ParameterSchema(ref=param_ref_name))
# Ensure global parameter is present
path_param = ParameterSchema(
@ -222,17 +226,25 @@ class KeystoneGenerator(OpenStackServerSourceBase):
# We can only assume the param type. For path it is logically a string only
path_param.type_schema = TypeSchema(type="string")
# For non /users/{id} urls link user_id path attribute to the user resource
if path_param.name == "user_id" and path_resource_names != ["users"]:
if path_param.name == "user_id" and path_resource_names != [
"users"
]:
if not path_param.openstack:
path_param.openstack = {}
path_param.openstack["resource_link"] = "identity/v3/user.id"
path_param.openstack["resource_link"] = (
"identity/v3/user.id"
)
if path_param.name == "domain_id" and path_resource_names != [
"domains"
]:
if not path_param.openstack:
path_param.openstack = {}
path_param.openstack["resource_link"] = "identity/v3/domain.id"
openapi_spec.components.parameters[global_param_name] = path_param
path_param.openstack["resource_link"] = (
"identity/v3/domain.id"
)
openapi_spec.components.parameters[global_param_name] = (
path_param
)
if len(path_elements) == 0:
path_resource_names.append("root")
elif path_elements[-1].startswith("{"):
@ -263,13 +275,17 @@ class KeystoneGenerator(OpenStackServerSourceBase):
elif path == "/v3":
operation_id_prefix = "version"
else:
operation_id_prefix = "/".join([x.strip("{}") for x in path_elements])
operation_id_prefix = "/".join(
[x.strip("{}") for x in path_elements]
)
for method in route.methods:
if method == "OPTIONS":
# Not sure what should be done with it
continue
if controller:
func = getattr(controller, method.replace("HEAD", "GET").lower(), None)
func = getattr(
controller, method.replace("HEAD", "GET").lower(), None
)
else:
func = view
# Set operationId
@ -350,11 +366,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
*,
method=None,
):
logging.info(
"Operation: %s [%s]",
path,
method,
)
logging.info("Operation: %s [%s]", path, method)
doc = inspect.getdoc(func)
if doc and not operation_spec.description:
doc = rst_to_md(doc)
@ -368,25 +380,24 @@ class KeystoneGenerator(OpenStackServerSourceBase):
end_version = None
ser_schema: dict | None = {}
(
query_params_versions,
body_schemas,
ser_schema,
expected_errors,
) = self._process_decorators(
func,
path_resource_names,
openapi_spec,
method,
start_version,
end_version,
None,
(query_params_versions, body_schemas, ser_schema, expected_errors) = (
self._process_decorators(
func,
path_resource_names,
openapi_spec,
method,
start_version,
end_version,
None,
)
)
if query_params_versions:
so = sorted(
query_params_versions,
key=lambda d: (tuple(map(int, d[1].split("."))) if d[1] else (0, 0)),
key=lambda d: (
tuple(map(int, d[1].split("."))) if d[1] else (0, 0)
),
)
for data, min_ver, max_ver in so:
self.process_query_parameters(
@ -411,7 +422,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
responses_spec = operation_spec.responses
# Errors
for error in ["403", "404"]:
responses_spec.setdefault(str(error), dict(description="Error"))
responses_spec.setdefault(str(error), {"description": "Error"})
# Response data
if method == "POST":
response_code = "201"
@ -438,7 +449,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
response_code = "204"
elif path == "/v3/users/{user_id}/password" and method == "POST":
response_code = "204"
rsp = responses_spec.setdefault(response_code, dict(description="Ok"))
rsp = responses_spec.setdefault(response_code, {"description": "Ok"})
if response_code != "204" and method not in ["DELETE", "HEAD"]:
# Arrange response placeholder
schema_name = (
@ -470,7 +481,9 @@ class KeystoneGenerator(OpenStackServerSourceBase):
operation_spec.security = []
elif method == "GET":
operation_spec.parameters.append(
ParameterSchema(ref="#/components/parameters/X-Subject-Token")
ParameterSchema(
ref="#/components/parameters/X-Subject-Token"
)
)
rsp_headers.setdefault(
"X-Subject-Token",
@ -482,7 +495,9 @@ class KeystoneGenerator(OpenStackServerSourceBase):
if tag not in [x["name"] for x in openapi_spec.tags]:
openapi_spec.tags.append({"name": tag, "description": None})
self._post_process_operation_hook(openapi_spec, operation_spec, path=path)
self._post_process_operation_hook(
openapi_spec, operation_spec, path=path
)
def _post_process_operation_hook(
self, openapi_spec, operation_spec, path: str | None = None

View File

@ -38,7 +38,7 @@ APPLICATION_CREDENTIAL_ACCESS_RULE_SCHEMA: dict[str, Any] = {
"properties": {
"access_rule": copy.deepcopy(
application_credential_schema._access_rules_properties["items"]
),
)
},
}
@ -83,9 +83,9 @@ APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA: dict[str, Any] = {
},
}
# Update `secret` field
APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA["properties"]["application_credential"][
"properties"
]["secret"] = {
APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA["properties"][
"application_credential"
]["properties"]["secret"] = {
"type": "string",
"description": "The secret for the application credential, either generated by the server or provided by the user. This is only ever shown once in the response to a create request. It is not stored nor ever shown again. If the secret is lost, a new application credential must be created.",
}
@ -96,7 +96,7 @@ APPLICATION_CREDENTIALS_SCHEMA: dict[str, Any] = {
"application_credentials": {
"type": "array",
"items": copy.deepcopy(APPLICATION_CREDENTIAL_SCHEMA),
},
}
},
}
@ -106,70 +106,59 @@ APPLICATION_CREDENTIALS_LIST_PARAMETERS = {
"name": "name",
"description": "The name of the application credential. Must be unique to a user.",
"schema": {"type": "string"},
},
}
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "users/user_id/application_credentials:get":
for (
key,
val,
) in APPLICATION_CREDENTIALS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in APPLICATION_CREDENTIALS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# ### Application Credentials
if name == "UsersAccess_RuleGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**APPLICATION_CREDENTIAL_ACCESS_RULE_SCHEMA),
name, TypeSchema(**APPLICATION_CREDENTIAL_ACCESS_RULE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "UsersAccess_RulesGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**APPLICATION_CREDENTIAL_ACCESS_RULES_SCHEMA),
name, TypeSchema(**APPLICATION_CREDENTIAL_ACCESS_RULES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "UsersApplication_CredentialsGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**APPLICATION_CREDENTIALS_SCHEMA),
name, TypeSchema(**APPLICATION_CREDENTIALS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"UsersApplication_CredentialGetResponse",
]:
elif name in ["UsersApplication_CredentialGetResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**APPLICATION_CREDENTIAL_CONTAINER_SCHEMA),
name, TypeSchema(**APPLICATION_CREDENTIAL_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "UsersApplication_CredentialsPostRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**APPLICATION_CREDENTIAL_CREATE_SCHEMA),
name, TypeSchema(**APPLICATION_CREDENTIAL_CREATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "UsersApplication_CredentialsPostResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA),
name, TypeSchema(**APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA)
)
ref = f"#/components/schemas/{name}"

View File

@ -27,14 +27,8 @@ SCOPE_SCHEMA: dict[str, Any] = {
"project": {
"type": "object",
"properties": {
"name": {
"type": "string",
"description": "Project Name",
},
"id": {
"type": "string",
"description": "Project Id",
},
"name": {"type": "string", "description": "Project Name"},
"id": {"type": "string", "description": "Project Id"},
"domain": {
"type": "object",
"properties": {
@ -53,29 +47,17 @@ SCOPE_SCHEMA: dict[str, Any] = {
"domain": {
"type": "object",
"properties": {
"id": {
"type": "string",
"description": "Domain id",
},
"name": {
"type": "string",
"description": "Domain name",
},
"id": {"type": "string", "description": "Domain id"},
"name": {"type": "string", "description": "Domain name"},
},
},
"OS-TRUST:trust": {
"type": "object",
"properties": {
"id": {
"type": "string",
},
},
"properties": {"id": {"type": "string"}},
},
"system": {
"type": "object",
"properties": {
"all": {"type": "boolean"},
},
"properties": {"all": {"type": "boolean"}},
},
},
}
@ -131,7 +113,7 @@ AUTH_TOKEN_ISSUE_SCHEMA: dict[str, Any] = replace_refs(
"$ref": "#/definitions/user_domain"
},
},
},
}
},
},
"token": {
@ -142,11 +124,9 @@ AUTH_TOKEN_ISSUE_SCHEMA: dict[str, Any] = replace_refs(
"type": "string",
"format": "password",
"description": "Authorization Token value",
},
}
},
"required": [
"id",
],
"required": ["id"],
},
"totp": {
"type": "object",
@ -173,11 +153,9 @@ AUTH_TOKEN_ISSUE_SCHEMA: dict[str, Any] = replace_refs(
},
},
"required": ["passcode"],
},
}
},
"required": [
"user",
],
"required": ["user"],
},
"application_credential": {
"type": "object",
@ -217,32 +195,25 @@ AUTH_TOKEN_ISSUE_SCHEMA: dict[str, Any] = replace_refs(
"required": ["secret"],
},
},
"required": [
"methods",
],
"required": ["methods"],
},
"scope": SCOPE_SCHEMA,
},
"required": [
"identity",
],
},
"required": ["identity"],
}
},
"definitions": {
"user_domain": {
"type": "object",
"description": "User Domain object",
"properties": {
"id": {
"type": "string",
"description": "User Domain ID",
},
"id": {"type": "string", "description": "User Domain ID"},
"name": {
"type": "string",
"description": "User Domain Name",
},
},
},
}
},
},
proxies=False,
@ -454,9 +425,7 @@ AUTH_TOKEN_SCHEMA: dict[str, Any] = {
AUTH_SCOPED_TOKEN_SCHEMA: dict[str, Any] = copy.deepcopy(AUTH_TOKEN_SCHEMA)
AUTH_SCOPED_TOKEN_SCHEMA["properties"]["token"]["properties"].update(
**{
"is_domain": {
"type": "boolean",
},
"is_domain": {"type": "boolean"},
"domain": {
"type": "object",
"description": "A domain object including the id and name representing the domain the token is scoped to. This is only included in tokens that are scoped to a domain.",
@ -466,10 +435,7 @@ AUTH_SCOPED_TOKEN_SCHEMA["properties"]["token"]["properties"].update(
"format": "uuid",
"description": "A domain UUID",
},
"name": {
"type": "string",
"description": "A domain name",
},
"name": {"type": "string", "description": "A domain name"},
},
},
"project": {
@ -498,10 +464,7 @@ AUTH_SCOPED_TOKEN_SCHEMA["properties"]["token"]["properties"].update(
"format": "uuid",
"description": "A role UUID",
},
"name": {
"type": "string",
"description": "A role name",
},
"name": {"type": "string", "description": "A role name"},
},
},
},
@ -546,7 +509,9 @@ AUTH_RECEIPT_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
@ -561,16 +526,14 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
"$ref": "#/components/headers/Openstack-Auth-Receipt"
}
},
"content": {receipt_mime_type: {"schema": {"$ref": receipt_schema_ref}}},
"content": {
receipt_mime_type: {"schema": {"$ref": receipt_schema_ref}}
},
}
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -578,14 +541,12 @@ def _get_schema_ref(
# Auth
if name == "AuthTokensPostRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**AUTH_TOKEN_ISSUE_SCHEMA),
name, TypeSchema(**AUTH_TOKEN_ISSUE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in ["AuthTokensGetResponse", "AuthTokensPostResponse"]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**AUTH_SCOPED_TOKEN_SCHEMA),
name, TypeSchema(**AUTH_SCOPED_TOKEN_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "AuthReceiptSchema":
@ -593,16 +554,12 @@ def _get_schema_ref(
name, TypeSchema(**AUTH_RECEIPT_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"AuthProjectsGetResponse",
]:
elif name in ["AuthProjectsGetResponse"]:
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**AUTH_PROJECTS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"AuthDomainsGetResponse",
]:
elif name in ["AuthDomainsGetResponse"]:
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**AUTH_DOMAINS_SCHEMA)
)
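
As a rough illustration of the auth-receipt wiring reformatted above, the helper below rebuilds the same response dict. The parameter names receipt_mime_type and receipt_schema_ref mirror the variables in the hunk; the default values and the description text are assumptions made only for this example.

def build_receipt_response(
    receipt_mime_type: str = "application/json",
    receipt_schema_ref: str = "#/components/schemas/AuthReceiptSchema",
) -> dict:
    # Mirrors the headers/content structure added in the hunk above; the
    # description string and default argument values are illustrative only.
    return {
        "description": "Additional authentication methods required",
        "headers": {
            "Openstack-Auth-Receipt": {
                "$ref": "#/components/headers/Openstack-Auth-Receipt"
            }
        },
        "content": {
            receipt_mime_type: {"schema": {"$ref": receipt_schema_ref}}
        },
    }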

View File

@ -33,7 +33,9 @@ LINKS_SCHEMA: dict[str, Any] = {
}
TAG_SCHEMA: dict[str, Any] = copy.deepcopy(ks_schema._project_tag_name_properties)
TAG_SCHEMA: dict[str, Any] = copy.deepcopy(
ks_schema._project_tag_name_properties
)
TAGS_SCHEMA: dict[str, Any] = {
"type": "object",

View File

@ -119,27 +119,24 @@ DOMAIN_LIST_PARAMETERS: dict[str, dict] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "domains:get":
for (
key,
val,
) in DOMAIN_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in DOMAIN_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -156,7 +153,9 @@ def _get_schema_ref(
)
ref = "#/components/schemas/Domain"
elif name == "DomainsGetResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**DOMAINS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**DOMAINS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
# Domain Config
@ -171,8 +170,7 @@ def _get_schema_ref(
"DomainsConfigDefaultGetResponse",
]:
openapi_spec.components.schemas.setdefault(
"DomainConfig",
TypeSchema(**DOMAIN_CONFIGS_SCHEMA),
"DomainConfig", TypeSchema(**DOMAIN_CONFIGS_SCHEMA)
)
ref = "#/components/schemas/DomainConfig"
elif name in [
@ -184,8 +182,7 @@ def _get_schema_ref(
"DomainsConfigGroupPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
"DomainConfigGroup",
TypeSchema(**DOMAIN_CONFIG_GROUP_SCHEMA),
"DomainConfigGroup", TypeSchema(**DOMAIN_CONFIG_GROUP_SCHEMA)
)
ref = "#/components/schemas/DomainConfigGroup"

View File

@ -90,7 +90,9 @@ ENDPOINTS_LIST_PARAMETERS = {
},
}
ENDPOINT_CREATE_SCHEMA: dict[str, Any] = copy.deepcopy(ENDPOINT_CONTAINER_SCHEMA)
ENDPOINT_CREATE_SCHEMA: dict[str, Any] = copy.deepcopy(
ENDPOINT_CONTAINER_SCHEMA
)
ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["properties"].pop("id")
ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["required"] = [
"interface",
@ -99,34 +101,30 @@ ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["required"] = [
]
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "endpoints:get":
for (
key,
val,
) in ENDPOINTS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in ENDPOINTS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# ### Endpoints
if name == "EndpointsGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**ENDPOINTS_SCHEMA),
name, TypeSchema(**ENDPOINTS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -136,14 +134,12 @@ def _get_schema_ref(
"EndpointPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
"Endpoint",
TypeSchema(**ENDPOINT_CONTAINER_SCHEMA),
"Endpoint", TypeSchema(**ENDPOINT_CONTAINER_SCHEMA)
)
ref = "#/components/schemas/Endpoint"
elif name == "EndpointsPostRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**ENDPOINT_CREATE_SCHEMA),
name, TypeSchema(**ENDPOINT_CREATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"

View File

@ -61,12 +61,16 @@ IDENTITY_PROVIDER_CONTAINER_SCHEMA: dict[str, Any] = {
IDENTITY_PROVIDER_CREATE_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"identity_provider": federation_schema.identity_provider_create},
"properties": {
"identity_provider": federation_schema.identity_provider_create
},
}
IDENTITY_PROVIDER_UPDATE_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"identity_provider": federation_schema.identity_provider_update},
"properties": {
"identity_provider": federation_schema.identity_provider_update
},
}
IDENTITY_PROVIDERS_SCHEMA: dict[str, Any] = {
@ -133,8 +137,7 @@ IDENTITY_PROVIDER_PROTOCOL_UPDATE_SCHEMA: dict[str, Any] = {
}
MAPPING_PROPERTIES = replace_refs(
federation_mapping_schema.IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0,
proxies=False,
federation_mapping_schema.IDP_ATTRIBUTE_MAPPING_SCHEMA_2_0, proxies=False
)
MAPPING_PROPERTIES.pop("definitions", None)
MAPPING_PROPERTIES["properties"]["schema_version"] = {
@ -215,24 +218,29 @@ FEDERATION_SERVICE_PROVIDERS_SCHEMA: dict[str, Any] = {
FEDERATION_SERVICE_PROVIDER_CREATE_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"service_provider": federation_schema.service_provider_create},
"properties": {
"service_provider": federation_schema.service_provider_create
},
}
FEDERATION_SERVICE_PROVIDER_UPDATE_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {"service_provider": federation_schema.service_provider_update},
"properties": {
"service_provider": federation_schema.service_provider_update
},
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "OS-FEDERATION/identity_providers:get":
for (
key,
val,
) in IDENTITY_PROVIDERS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in IDENTITY_PROVIDERS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -247,11 +255,7 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str | None
@ -260,9 +264,7 @@ def _get_schema_ref(
name, TypeSchema(**auth.AUTH_PROJECTS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"Os_FederationDomainsGetResponse",
]:
elif name in ["Os_FederationDomainsGetResponse"]:
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**auth.AUTH_DOMAINS_SCHEMA)
)
@ -272,8 +274,7 @@ def _get_schema_ref(
"AuthOs_FederationSaml2EcpPostRequest",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**auth.AUTH_TOKEN_ISSUE_SCHEMA),
name, TypeSchema(**auth.AUTH_TOKEN_ISSUE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -312,8 +313,7 @@ def _get_schema_ref(
# ### Identity provider
elif name == "Os_FederationIdentity_ProvidersGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDERS_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDERS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -322,27 +322,23 @@ def _get_schema_ref(
"Os_FederationIdentity_ProviderPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_CONTAINER_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_FederationIdentity_ProviderPutRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_CREATE_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_CREATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_FederationIdentity_ProviderPatchRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_UPDATE_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_UPDATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
# ### Identity provider protocols
elif name == "Os_FederationIdentity_ProvidersProtocolsGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_PROTOCOLS_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_PROTOCOLS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -351,27 +347,23 @@ def _get_schema_ref(
"Os_FederationIdentity_ProvidersProtocolPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_CONTAINER_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_FederationIdentity_ProvidersProtocolPutRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_CREATE_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_CREATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_FederationIdentity_ProvidersProtocolPatchRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_UPDATE_SCHEMA),
name, TypeSchema(**IDENTITY_PROVIDER_PROTOCOL_UPDATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
# ### Identity provider mapping
elif name == "Os_FederationMappingsGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**MAPPINGS_SCHEMA),
name, TypeSchema(**MAPPINGS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -380,8 +372,7 @@ def _get_schema_ref(
"Os_FederationMappingPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**MAPPING_CONTAINER_SCHEMA),
name, TypeSchema(**MAPPING_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -389,15 +380,13 @@ def _get_schema_ref(
"Os_FederationMappingPatchRequest",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**MAPPING_CREATE_SCHEMA),
name, TypeSchema(**MAPPING_CREATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
# ### Identity provider service provider
elif name == "Os_FederationService_ProvidersGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**FEDERATION_SERVICE_PROVIDERS_SCHEMA),
name, TypeSchema(**FEDERATION_SERVICE_PROVIDERS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -406,20 +395,17 @@ def _get_schema_ref(
"Os_FederationService_ProviderPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**FEDERATION_SERVICE_PROVIDER_CONTAINER_SCHEMA),
name, TypeSchema(**FEDERATION_SERVICE_PROVIDER_CONTAINER_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_FederationService_ProviderPutRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**FEDERATION_SERVICE_PROVIDER_CREATE_SCHEMA),
name, TypeSchema(**FEDERATION_SERVICE_PROVIDER_CREATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "Os_FederationService_ProviderPatchRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**FEDERATION_SERVICE_PROVIDER_UPDATE_SCHEMA),
name, TypeSchema(**FEDERATION_SERVICE_PROVIDER_UPDATE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
# SAML2 Metadata

View File

@ -43,7 +43,7 @@ GROUPS_LIST_PARAMETERS: dict[str, Any] = {
"name": "domain_id",
"description": "Filters the response by a domain ID.",
"schema": {"type": "string", "format": "uuid"},
},
}
}
GROUP_USERS_LIST_PARAMETERS: dict[str, Any] = {
@ -52,41 +52,45 @@ GROUP_USERS_LIST_PARAMETERS: dict[str, Any] = {
"name": "password_expires_at",
"description": "Filter results based on which user passwords have expired. The query should include an operator and a timestamp with a colon (:) separating the two, for example: `password_expires_at={operator}:{timestamp}`.\nValid operators are: `lt`, `lte`, `gt`, `gte`, `eq`, and `neq`.\nValid timestamps are of the form: YYYY-MM-DDTHH:mm:ssZ.",
"schema": {"type": "string", "format": "date-time"},
},
}
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "groups:get":
for key, val in GROUPS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
elif operationId == "groups/group_id/users:get":
for key, val in GROUP_USERS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# Groups
if name == "GroupsGetResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**GROUPS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**GROUPS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"GroupsPostRequest",

View File

@ -38,7 +38,7 @@ PROJECT_CONTAINER_SCHEMA: dict[str, Any] = {
**ks_schema._project_properties,
},
"additionalProperties": True,
},
}
},
}
@ -83,26 +83,23 @@ PROJECT_LIST_PARAMETERS = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "projects:get":
for (
key,
val,
) in PROJECT_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in PROJECT_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -115,12 +112,13 @@ def _get_schema_ref(
"ProjectGetResponse",
]:
openapi_spec.components.schemas.setdefault(
"Project",
TypeSchema(**PROJECT_CONTAINER_SCHEMA),
"Project", TypeSchema(**PROJECT_CONTAINER_SCHEMA)
)
ref = "#/components/schemas/Project"
elif name == "ProjectsGetResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**PROJECTS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**PROJECTS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
# Project Tags

View File

@ -52,38 +52,34 @@ REGIONS_LIST_PARAMETERS = {
"name": "parent_region_id",
"description": "Filters the response by a parent region, by ID.",
"schema": {"type": "string", "format": "uuid"},
},
}
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "regions:get":
for (
key,
val,
) in REGIONS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in REGIONS_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# ### Regions
if name == "RegionsGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**REGIONS_SCHEMA),
name, TypeSchema(**REGIONS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -94,8 +90,7 @@ def _get_schema_ref(
"RegionPatchResponse",
]:
openapi_spec.components.schemas.setdefault(
"Region",
TypeSchema(**REGION_CONTAINER_SCHEMA),
"Region", TypeSchema(**REGION_CONTAINER_SCHEMA)
)
ref = "#/components/schemas/Region"

View File

@ -48,10 +48,7 @@ ROLE_INFO_SCHEMA: dict[str, Any] = {
"format": "uuid",
"description": "The role ID.",
},
"name": {
"type": "string",
"description": "The role name.",
},
"name": {"type": "string", "description": "The role name."},
"description": {
"type": "string",
"description": "The role description.",
@ -111,7 +108,7 @@ ROLE_LIST_PARAMETERS: dict[str, Any] = {
"name": "domain_id",
"description": "Filters the response by a domain ID.",
"schema": {"type": "string", "format": "uuid"},
},
}
}
@ -135,10 +132,7 @@ ROLES_INFERENCE_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"prior_role": ROLE_INFO_SCHEMA,
"implies": {
"type": "array",
"items": ROLE_INFO_SCHEMA,
},
"implies": {"type": "array", "items": ROLE_INFO_SCHEMA},
},
}
},
@ -153,10 +147,7 @@ ROLES_INFERENCES_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"prior_role": ROLE_INFO_SCHEMA,
"implies": {
"type": "array",
"items": ROLE_INFO_SCHEMA,
},
"implies": {"type": "array", "items": ROLE_INFO_SCHEMA},
},
},
}
@ -213,10 +204,7 @@ ROLE_ASSIGNMENT_SCHEMA: dict[str, Any] = {
"format": "uri",
"description": "a link to the assignment that gave rise to this entity",
},
"membership": {
"type": "string",
"format": "uri",
},
"membership": {"type": "string", "format": "uri"},
},
},
},
@ -298,16 +286,17 @@ ROLE_ASSIGNMENT_LIST_PARAMETERS: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "roles:get":
for (
key,
val,
) in ROLE_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in ROLE_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
@ -317,10 +306,7 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
ROLE_ASSIGNMENTS_QUERY_PARAMETERS,
ROLE_ASSIGNMENT_LIST_PARAMETERS,
]:
for (
key,
val,
) in map.items():
for key, val in map.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
@ -329,11 +315,10 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
elif operationId == "role_assignments:head":
for (
key,
val,
) in ROLE_ASSIGNMENTS_QUERY_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in ROLE_ASSIGNMENTS_QUERY_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
@ -341,17 +326,15 @@ def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# Roles
if name == "RolesGetResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**ROLES_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**ROLES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
"RolesPostRequest",
@ -368,8 +351,7 @@ def _get_schema_ref(
# Role Implies
elif name == "RolesImpliesGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**ROLES_INFERENCE_SCHEMA),
name, TypeSchema(**ROLES_INFERENCE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "RolesImplyGetResponse":

View File

@ -33,10 +33,7 @@ SERVICE_SCHEMA: dict[str, Any] = {
"description": "The UUID of the service to which the endpoint belongs.",
"readOnly": True,
},
"name": {
"type": "string",
"description": "The service name.",
},
"name": {"type": "string", "description": "The service name."},
"type": {
"type": "string",
"description": "The service type, which describes the API implemented by the ",
@ -60,38 +57,34 @@ SERVICES_LIST_PARAMETERS = {
"name": "service",
"description": "Filters the response by a domain ID.",
"schema": {"type": "string"},
},
}
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "services:get":
for (
key,
val,
) in SERVICES_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
for key, val in SERVICES_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
# ### Services
if name == "ServicesGetResponse":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**SERVICES_SCHEMA),
name, TypeSchema(**SERVICES_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -102,8 +95,7 @@ def _get_schema_ref(
"ServicePatchResponse",
]:
openapi_spec.components.schemas.setdefault(
"Service",
TypeSchema(**SERVICE_CONTAINER_SCHEMA),
"Service", TypeSchema(**SERVICE_CONTAINER_SCHEMA)
)
ref = "#/components/schemas/Service"

View File

@ -99,12 +99,12 @@ USER_PWD_CHANGE_SCHEMA: dict[str, Any] = {
}
# Set `password` format for password change operation
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]["password"]["format"] = (
"password"
)
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]["original_password"][
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]["password"][
"format"
] = "password"
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"][
"original_password"
]["format"] = "password"
USER_GROUP_SCHEMA: dict[str, Any] = {
"type": "object",
@ -123,10 +123,7 @@ USER_GROUP_SCHEMA: dict[str, Any] = {
"format": "uuid",
"description": "The ID of the group.",
},
"name": {
"type": "string",
"description": "The name of the group.",
},
"name": {"type": "string", "description": "The name of the group."},
"membership_expires_at": {
"type": "string",
"format": "date-time",
@ -169,10 +166,7 @@ USER_PROJECT_SCHEMA: dict[str, Any] = {
"format": "uuid",
"description": "The parent id of the project.",
},
"name": {
"type": "string",
"description": "The name of the project.",
},
"name": {"type": "string", "description": "The name of the project."},
},
}
@ -188,24 +182,24 @@ USER_PROJECTS_SCHEMA: dict[str, Any] = {
}
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId == "users:get":
for key, val in USER_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
action_name=None,
openapi_spec, name, description=None, schema_def=None, action_name=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -221,7 +215,9 @@ def _get_schema_ref(
)
ref = f"#/components/schemas/{name}"
elif name == "UsersGetResponse":
openapi_spec.components.schemas.setdefault(name, TypeSchema(**USERS_SCHEMA))
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**USERS_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name in ["UserGetResponse", "UserPostResponse", "UserPatchResponse"]:
openapi_spec.components.schemas.setdefault(
@ -230,8 +226,7 @@ def _get_schema_ref(
ref = f"#/components/schemas/{name}"
elif name == "UsersPasswordPostRequest":
openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**USER_PWD_CHANGE_SCHEMA),
name, TypeSchema(**USER_PWD_CHANGE_SCHEMA)
)
ref = f"#/components/schemas/{name}"
elif name == "UsersGroupsGetResponse":

View File

@ -15,17 +15,13 @@ from pathlib import Path
from ruamel.yaml.scalarstring import LiteralScalarString
from codegenerator.common.schema import (
SpecSchema,
)
from codegenerator.common.schema import SpecSchema
from codegenerator.openapi.base import OpenStackServerSourceBase
from codegenerator.openapi.utils import merge_api_ref_doc
class ManilaGenerator(OpenStackServerSourceBase):
URL_TAG_MAP = {
"/versions": "version",
}
URL_TAG_MAP = {"/versions": "version"}
def _api_ver_major(self, ver):
return ver._ver_major
@ -57,11 +53,15 @@ class ManilaGenerator(OpenStackServerSourceBase):
lock_path = self.useFixture(fixtures.TempDir()).path
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
self.fixture.config(lock_path=lock_path, group="oslo_concurrency")
self.fixture.config(disable_process_locking=True, group="oslo_concurrency")
self.fixture.config(
disable_process_locking=True, group="oslo_concurrency"
)
rpc.init(CONF)
CONF.set_override("backend_url", "file://" + lock_path, group="coordination")
CONF.set_override(
"backend_url", "file://" + lock_path, group="coordination"
)
coordination.LOCK_COORDINATOR.start()
# config = cfg.ConfigOpts()
@ -85,24 +85,24 @@ class ManilaGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(impl_path)
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Shared-File-System API",
description=LiteralScalarString(
info={
"title": "OpenStack Shared-File-System API",
"description": LiteralScalarString(
"Shared File System API provided by Manila service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
),
}
},
)
for route in self.router.map.matchlist:
@ -119,9 +119,7 @@ class ManilaGenerator(OpenStackServerSourceBase):
if args.api_ref_src:
merge_api_ref_doc(
openapi_spec,
args.api_ref_src,
allow_strip_version=False,
openapi_spec, args.api_ref_src, allow_strip_version=False
)
self.dump_openapi(openapi_spec, impl_path, args.validate)
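
The SpecSchema bootstrap converted to dict literals above condenses to the helper below, assuming codegenerator.common.schema and ruamel.yaml are importable in the environment; the literal values mirror the Manila branch of the diff.

from ruamel.yaml.scalarstring import LiteralScalarString

from codegenerator.common.schema import SpecSchema


def new_manila_spec(api_version: str) -> SpecSchema:
    # Same keyword arguments as the call in the hunk above.
    return SpecSchema(
        info={
            "title": "OpenStack Shared-File-System API",
            "description": LiteralScalarString(
                "Shared File System API provided by Manila service"
            ),
            "version": api_version,
        },
        openapi="3.1.0",
        security=[{"ApiKeyAuth": []}],
        components={
            "securitySchemes": {
                "ApiKeyAuth": {
                    "type": "apiKey",
                    "in": "header",
                    "name": "X-Auth-Token",
                }
            }
        },
    )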

View File

@ -210,17 +210,11 @@ class NeutronGenerator(OpenStackServerSourceBase):
fp.write(PASTE_CONFIG)
neutron_config.init([])
cfg.CONF.set_override(
"service_plugins",
[
"router",
"vpnaas",
],
)
cfg.CONF.set_override("service_plugins", ["router", "vpnaas"])
cfg.CONF.set_override(
"service_provider",
[
"VPN:dummy:neutron_vpnaas.tests.unit.dummy_ipsec.DummyIPsecVPNDriver:default",
"VPN:dummy:neutron_vpnaas.tests.unit.dummy_ipsec.DummyIPsecVPNDriver:default"
],
group="service_providers",
)
@ -261,27 +255,27 @@ class NeutronGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(Path(impl_path))
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Network API",
description=LiteralScalarString(
info={
"title": "OpenStack Network API",
"description": LiteralScalarString(
"Network API provided by Neutron service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
tags=[],
paths={},
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
headers={},
parameters={
"headers": {},
"parameters": {
"limit": ParameterSchema(
name="limit",
location="query",
@ -310,11 +304,13 @@ class NeutronGenerator(OpenStackServerSourceBase):
name="sort_dir",
location="query",
description="Sort direction. This is an optional feature and may be silently ignored by the server.",
type_schema=TypeSchema(type="string", enum=["asc", "desc"]),
type_schema=TypeSchema(
type="string", enum=["asc", "desc"]
),
),
},
schemas={},
),
"schemas": {},
},
)
lnk = Path(impl_path.parent, "v2.yaml")
lnk.unlink(missing_ok=True)
@ -363,7 +359,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
# merge descriptions from api-ref doc
if args.api_ref_src:
merge_api_ref_doc(openapi_spec, args.api_ref_src, allow_strip_version=False)
merge_api_ref_doc(
openapi_spec, args.api_ref_src, allow_strip_version=False
)
self.dump_openapi(openapi_spec, Path(impl_path), args.validate)
@ -378,39 +376,50 @@ class NeutronGenerator(OpenStackServerSourceBase):
# continue
# if "networks" not in route.routepath:
# continue
if route.routepath.endswith("/edit") or route.routepath.endswith("/new"):
if route.routepath.endswith("/edit") or route.routepath.endswith(
"/new"
):
# NEUTRON folks - please fix
logging.warning("Skipping processing %s route", route.routepath)
logging.warning(
"Skipping processing %s route", route.routepath
)
continue
if "/qos/ports" in route.routepath or "/qos/networks" in route.routepath:
if (
"/qos/ports" in route.routepath
or "/qos/networks" in route.routepath
):
# NEUTRON folks - please fix
logging.warning("Skipping processing %s route", route.routepath)
logging.warning(
"Skipping processing %s route", route.routepath
)
continue
if (
route.routepath.endswith("/tags")
and route.conditions["method"][0] == "POST"
):
logging.warning("Skipping processing POST %s route", route.routepath)
logging.warning(
"Skipping processing POST %s route", route.routepath
)
continue
if route.routepath.startswith("/extensions") and route.conditions["method"][
0
] in ["POST", "DELETE", "PUT"]:
continue
if route.routepath.startswith("/availability_zones") and route.conditions[
if route.routepath.startswith("/extensions") and route.conditions[
"method"
][0] in ["POST", "DELETE", "PUT"]:
continue
if route.routepath.startswith("/availability_zones/") and route.conditions[
"method"
][0] in ["GET"]:
if route.routepath.startswith(
"/availability_zones"
) and route.conditions["method"][0] in ["POST", "DELETE", "PUT"]:
continue
if route.routepath.startswith(
"/availability_zones/"
) and route.conditions["method"][0] in ["GET"]:
# There is no "show" for AZ
continue
if route.routepath in ["/quotas/tenant", "/quotas/project"]:
# Tenant and Project quota are not a thing
continue
if route.routepath == "/quotas" and route.conditions["method"][0] in [
"POST"
]:
if route.routepath == "/quotas" and route.conditions["method"][
0
] in ["POST"]:
# Tenant and Project quota is the same
continue
@ -496,8 +505,12 @@ class NeutronGenerator(OpenStackServerSourceBase):
path += part
if "method" not in route.conditions:
raise RuntimeError("Method not set for %s" % route)
method = route.conditions.get("method", "GET")[0] if route.conditions else "GET"
raise RuntimeError(f"Method not set for {route}")
method = (
route.conditions.get("method", "GET")[0]
if route.conditions
else "GET"
)
wsgi_controller = controller or route.defaults["controller"]
# collection_name = route.collection_name
@ -523,7 +536,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
logging.warning("Skipping duplicated route %s", processed_key)
return
logging.info("Path: %s; method: %s; operation: %s", path, method, action)
logging.info(
"Path: %s; method: %s; operation: %s", path, method, action
)
# Get Path elements
path_elements: list[str] = list(filter(None, path.split("/")))
@ -554,15 +569,17 @@ class NeutronGenerator(OpenStackServerSourceBase):
for path_element in path_elements:
if "{" in path_element:
param_name = path_element.strip("{}")
global_param_name = f"{global_param_name_prefix}_{param_name}".replace(
":", "_"
global_param_name = (
f"{global_param_name_prefix}_{param_name}".replace(
":", "_"
)
)
if global_param_name == "_project_id":
global_param_name = "project_id"
param_ref_name = f"#/components/parameters/{global_param_name}"
# Ensure reference to the param is in the path_params
if param_ref_name not in [k.ref for k in [p for p in path_params]]:
if param_ref_name not in [k.ref for k in list(path_params)]:
path_params.append(ParameterSchema(ref=param_ref_name))
# Ensure global parameter is present
path_param = ParameterSchema(
@ -570,10 +587,14 @@ class NeutronGenerator(OpenStackServerSourceBase):
)
# openapi_spec.components["parameters"].setdefault(global_param_name, dict())
if not path_param.description:
path_param.description = f"{param_name} parameter for {path} API"
path_param.description = (
f"{param_name} parameter for {path} API"
)
# We can only assume the param type. For path it is logically a string only
path_param.type_schema = TypeSchema(type="string")
openapi_spec.components.parameters[global_param_name] = path_param
openapi_spec.components.parameters[global_param_name] = (
path_param
)
else:
path_resource_names.append(path_element.replace("-", "_"))
@ -591,7 +612,8 @@ class NeutronGenerator(OpenStackServerSourceBase):
operation_id = re.sub(
r"^(/?v[0-9.]*/)",
"",
"/".join([x.strip("{}") for x in path_elements]) + f":{method.lower()}", # noqa
"/".join([x.strip("{}") for x in path_elements])
+ f":{method.lower()}", # noqa
)
path_spec = openapi_spec.paths.setdefault(
@ -629,10 +651,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
method=None,
path=None,
):
logging.info(
"Operation: %s",
operation_name,
)
logging.info("Operation: %s", operation_name)
attr_info = getattr(controller, "_attr_info", {})
collection = getattr(controller, "_collection", None)
@ -675,16 +694,22 @@ class NeutronGenerator(OpenStackServerSourceBase):
for field, data in attr_info.items():
# operation_spec.setdefault("parameters", [])
if data.get("is_filter", False):
global_param_name = f"{collection}_{field}".replace(":", "_")
param_ref_name = f"#/components/parameters/{global_param_name}"
global_param_name = f"{collection}_{field}".replace(
":", "_"
)
param_ref_name = (
f"#/components/parameters/{global_param_name}"
)
# Ensure global parameter is present
query_param = openapi_spec.components.parameters.setdefault(
global_param_name,
ParameterSchema(
location="query",
name=field,
type_schema=get_schema(data),
),
query_param = (
openapi_spec.components.parameters.setdefault(
global_param_name,
ParameterSchema(
location="query",
name=field,
type_schema=get_schema(data),
),
)
)
if not query_param.description:
query_param.description = (
@ -715,7 +740,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
)
query_param.style = "form"
query_param.explode = False
if param_ref_name not in [x.ref for x in operation_spec.parameters]:
if param_ref_name not in [
x.ref for x in operation_spec.parameters
]:
operation_spec.parameters.append(
ParameterSchema(ref=param_ref_name)
)
@ -755,7 +782,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
response_code = "204"
if response_code:
rsp = responses_spec.setdefault(response_code, dict(description="Ok"))
rsp = responses_spec.setdefault(
response_code, {"description": "Ok"}
)
if response_code != "204" and method != "DELETE":
# Arrange response placeholder
schema_name = (
@ -799,8 +828,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(
type="object",
description=LiteralScalarString(description),
type="object", description=LiteralScalarString(description)
),
)
# Here come schemas that are not present in Neutron
@ -849,8 +877,12 @@ class NeutronGenerator(OpenStackServerSourceBase):
"QuotasDefaultDefaultResponse",
"QuotasProjectProjectResponse",
]:
schema.properties = {"quota": copy.deepcopy(neutron_schemas.QUOTA_SCHEMA)}
elif name.endswith("TagUpdateRequest") or name.endswith("TagUpdateResponse"):
schema.properties = {
"quota": copy.deepcopy(neutron_schemas.QUOTA_SCHEMA)
}
elif name.endswith("TagUpdateRequest") or name.endswith(
"TagUpdateResponse"
):
# PUT tag does not have request body
return None
@ -885,12 +917,16 @@ class NeutronGenerator(OpenStackServerSourceBase):
send_props = {}
return_props = {}
# Consume request name to required fields mapping
required_fields = neutron_schemas.REQUIRED_FIELDS_MAPPING.get(name, [])
required_fields = neutron_schemas.REQUIRED_FIELDS_MAPPING.get(
name, []
)
for field, data in schema_def.items():
js_schema = get_schema(data)
# Dirty hacks for corrupted schemas
if field in ["availability_zones", "tags"]:
js_schema.update({"type": "array", "items": {"type": "string"}})
js_schema.update(
{"type": "array", "items": {"type": "string"}}
)
elif field == "revision_number":
js_schema.update({"type": "integer"})
elif field == "subnets":
@ -917,14 +953,15 @@ class NeutronGenerator(OpenStackServerSourceBase):
"type": "string",
"format": "hostname",
},
"ip_address": {
"type": "string",
},
"ip_address": {"type": "string"},
},
},
}
)
elif resource_key == "floatingip" and field == "port_forwardings":
elif (
resource_key == "floatingip"
and field == "port_forwardings"
):
js_schema.update(
{
"type": "array",
@ -1014,7 +1051,8 @@ class NeutronGenerator(OpenStackServerSourceBase):
}
)
elif (
resource_key == "security_group" and field == "security_group_rules"
resource_key == "security_group"
and field == "security_group_rules"
):
js_schema.update(
{
@ -1085,7 +1123,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
"maxLength": 255,
"description": "A human-readable description for the resource.",
},
"normalized_cidr": {"type": ["string", "null"]},
"normalized_cidr": {
"type": ["string", "null"]
},
"remote_address_group_id": {
"type": "string",
"description": "The remote address group UUID that is associated with this\nsecurity group rule.",
@ -1112,7 +1152,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
"items": {
"type": "object",
"properties": (
send_props if name.endswith("Request") else return_props
send_props
if name.endswith("Request")
else return_props
),
},
}
@ -1123,7 +1165,9 @@ class NeutronGenerator(OpenStackServerSourceBase):
resource_key: {
"type": "object",
"properties": (
send_props if name.endswith("Request") else return_props
send_props
if name.endswith("Request")
else return_props
),
}
}
@ -1132,7 +1176,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
required_fields
)
else:
logging.warning("No Schema information for %s" % name)
logging.warning(f"No Schema information for {name}")
return f"#/components/schemas/{name}"
@ -1287,34 +1331,22 @@ def get_schema(param_data):
},
}
elif "type:list_of_any_key_specs_or_none" in validate:
logging.warning("TODO: Implement type:list_of_any_key_specs_or_none")
logging.warning(
"TODO: Implement type:list_of_any_key_specs_or_none"
)
schema = {
"type": "array",
"items": {
"type": "object",
"extraProperties": True,
},
"items": {"type": "object", "extraProperties": True},
"x-openstack": {"todo": "implementme"},
}
elif "type:subnet_list" in validate:
schema = {
"type": "array",
"items": {
"type": "string",
},
}
schema = {"type": "array", "items": {"type": "string"}}
elif "type:service_plugin_type" in validate:
schema = {
"type": "string",
}
schema = {"type": "string"}
elif "type:ip_address" in validate:
schema = {
"type": "string",
}
schema = {"type": "string"}
elif "type:ip_address_or_none" in validate:
schema = {
"type": "string",
}
schema = {"type": "string"}
elif "type:subnet_or_none" in validate:
schema = {"type": ["string", "null"]}
elif "type:fip_dns_host_name" in validate:
@ -1346,19 +1378,12 @@ def get_schema(param_data):
},
}
elif "type:nameservers" in validate:
schema = {
"type": "array",
"items": {
"type": "string",
},
}
schema = {"type": "array", "items": {"type": "string"}}
elif "type:list_of_subnet_service_types" in validate:
schema = {
"type": "array",
"description": "The service types associated with the subnet",
"items": {
"type": "string",
},
"items": {"type": "string"},
}
elif "type:dict_or_nodata" in validate:
schema = get_schema(validate["type:dict_or_nodata"])
@ -1367,7 +1392,7 @@ def get_schema(param_data):
elif "type:list_of_subnets_or_none" in validate:
schema = {"type": "array", "items": {"type": "string"}}
else:
raise RuntimeError("Unsupported type %s in %s" % (validate, param_data))
raise RuntimeError(f"Unsupported type {validate} in {param_data}")
schema = {"type": "string"}
if convert_to:
# Nice way to get type of the field, isn't it?
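
A reduced, dict-only sketch of the validator-to-JSON-Schema mapping that get_schema() implements. Only branches visible in the hunks above are reproduced; unknown validators raise, as in the diff, although param_data is omitted from the message in this cut-down version.

from typing import Any


def validator_to_schema(validate: dict[str, Any]) -> dict[str, Any]:
    # get_schema() handles many more validators and also inspects
    # convert_to and default values; this covers only what is shown above.
    if "type:subnet_list" in validate:
        return {"type": "array", "items": {"type": "string"}}
    if "type:service_plugin_type" in validate:
        return {"type": "string"}
    if "type:ip_address" in validate or "type:ip_address_or_none" in validate:
        return {"type": "string"}
    if "type:nameservers" in validate:
        return {"type": "array", "items": {"type": "string"}}
    if "type:list_of_subnets_or_none" in validate:
        return {"type": "array", "items": {"type": "string"}}
    raise RuntimeError(f"Unsupported type {validate}")


print(validator_to_schema({"type:nameservers": None}))
# -> {'type': 'array', 'items': {'type': 'string'}}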

View File

@ -167,10 +167,7 @@ ROUTER_UPDATE_INTERFACE_REQUEST_SCHEMA: dict[str, Any] = {
"description": "The ID of the port. One of subnet_id or port_id must be specified.",
},
},
"oneOf": [
{"required": ["subnet_id"]},
{"required": ["port_id"]},
],
"oneOf": [{"required": ["subnet_id"]}, {"required": ["port_id"]}],
}
ROUTER_INTERFACE_RESPONSE_SCHEMA: dict[str, Any] = {
@ -189,10 +186,7 @@ ROUTER_INTERFACE_RESPONSE_SCHEMA: dict[str, Any] = {
"subnet_ids": {
"type": "array",
"description": "A list of the ID of the subnet which the router interface belongs to. The list contains only one member.",
"items": {
"type": "string",
"format": "uuid",
},
"items": {"type": "string", "format": "uuid"},
"minItems": 1,
"maxItems": 1,
},
@ -237,23 +231,17 @@ ROUTER_UPDATE_EXTRAROUTES_REQUEST_SCHEMA: dict[str, Any] = {
"items": {
"type": "object",
"properties": {
"destination": {
"type": "string",
},
"destination": {"type": "string"},
"nexthop": {
"type": "string",
"oneOf": [
{
"format": "ipv4",
},
{
"format": "ipv6",
},
{"format": "ipv4"},
{"format": "ipv6"},
],
},
},
},
},
}
},
}
},
@ -281,18 +269,12 @@ ROUTER_EXTRAROUTES_RESPONSE_SCHEMA: dict[str, Any] = {
"items": {
"type": "object",
"properties": {
"destination": {
"type": "string",
},
"destination": {"type": "string"},
"nexthop": {
"type": "string",
"oneOf": [
{
"format": "ipv4",
},
{
"format": "ipv6",
},
{"format": "ipv4"},
{"format": "ipv6"},
],
},
},
@ -306,9 +288,7 @@ ROUTER_EXTRAROUTES_RESPONSE_SCHEMA: dict[str, Any] = {
EXTERNAL_GATEWAY_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"enable_snat": {
"type": "boolean",
},
"enable_snat": {"type": "boolean"},
"external_fixed_ips": {
"type": "array",
"items": {
@ -316,15 +296,9 @@ EXTERNAL_GATEWAY_SCHEMA: dict[str, Any] = {
"properties": {
"ip_address": {
"type": "string",
"oneOf": [
{"format": "ipv4"},
{"format": "ipv6"},
],
},
"subnet_id": {
"type": "string",
"format": "uuid",
"oneOf": [{"format": "ipv4"}, {"format": "ipv6"}],
},
"subnet_id": {"type": "string", "format": "uuid"},
},
},
},
@ -343,7 +317,7 @@ ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = {
"type": "array",
"description": "The list of external gateways of the router.",
"items": EXTERNAL_GATEWAY_SCHEMA,
},
}
},
}
},
@ -352,15 +326,15 @@ ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = {
ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = copy.deepcopy(
ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
)
ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"]["properties"][
"external_gateways"
]["items"]["properties"]["network_id"]["readOnly"] = True
ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"][
"properties"
]["external_gateways"]["items"]["properties"]["network_id"]["readOnly"] = True
ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = copy.deepcopy(
ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
)
ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"]["properties"][
"external_gateways"
]["items"]["properties"].pop("enable_snat")
ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"][
"properties"
]["external_gateways"]["items"]["properties"].pop("enable_snat")
ADDRESS_GROUP_ADDRESS_SCHEMA: dict[str, Any] = {
"type": "object",
@ -371,9 +345,7 @@ ADDRESS_GROUP_ADDRESS_SCHEMA: dict[str, Any] = {
"addresses": {
"type": "array",
"description": "A list of IP addresses.",
"items": {
"type": "string",
},
"items": {"type": "string"},
}
},
}
@ -474,10 +446,7 @@ L3_ROUTER_AGENTS_SCHEMA: dict[str, Any] = {
def _get_schema_ref(
openapi_spec,
name,
description=None,
schema_def=None,
openapi_spec, name, description=None, schema_def=None
) -> tuple[str | None, str | None, bool]:
mime_type: str = "application/json"
ref: str
@ -518,12 +487,18 @@ def _get_schema_ref(
**ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
)
ref = f"#/components/schemas/{name}"
elif name == "RoutersUpdate_External_GatewaysUpdate_External_GatewaysRequest":
elif (
name
== "RoutersUpdate_External_GatewaysUpdate_External_GatewaysRequest"
):
openapi_spec.components.schemas[name] = TypeSchema(
**ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
)
ref = f"#/components/schemas/{name}"
elif name == "RoutersRemove_External_GatewaysRemove_External_GatewaysRequest":
elif (
name
== "RoutersRemove_External_GatewaysRemove_External_GatewaysRequest"
):
openapi_spec.components.schemas[name] = TypeSchema(
**ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
)
@ -552,10 +527,14 @@ def _get_schema_ref(
ref = "#/components/schemas/Address_GroupShowResponse"
elif name == "AgentsL3_RoutersIndexResponse":
openapi_spec.components.schemas[name] = TypeSchema(**L3_ROUTER_AGENTS_SCHEMA)
openapi_spec.components.schemas[name] = TypeSchema(
**L3_ROUTER_AGENTS_SCHEMA
)
ref = f"#/components/schemas/{name}"
elif name == "AgentsL3_RoutersIndexResponse":
openapi_spec.components.schemas[name] = TypeSchema(**L3_ROUTER_AGENTS_SCHEMA)
openapi_spec.components.schemas[name] = TypeSchema(
**L3_ROUTER_AGENTS_SCHEMA
)
ref = f"#/components/schemas/{name}"
elif name == "AgentsL3_RoutersCreateRequest":
openapi_spec.components.schemas[name] = TypeSchema(

View File

@ -68,24 +68,24 @@ class NovaGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(impl_path)
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Compute API",
description=LiteralScalarString(
info={
"title": "OpenStack Compute API",
"description": LiteralScalarString(
"Compute API provided by Nova service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
),
}
},
)
for route in self.router.map.matchlist:
@ -226,7 +226,9 @@ class NovaGenerator(OpenStackServerSourceBase):
]:
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**nova_schemas.SERVER_ACTION_CREATE_IMAGE_RESPONSE_SCHEMA),
TypeSchema(
**nova_schemas.SERVER_ACTION_CREATE_IMAGE_RESPONSE_SCHEMA
),
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -241,7 +243,9 @@ class NovaGenerator(OpenStackServerSourceBase):
elif name == "ServersActionOs-GetconsoleoutputResponse":
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**nova_schemas.SERVER_ACTION_GET_CONSOLE_OUTPUT_SCHEMA),
TypeSchema(
**nova_schemas.SERVER_ACTION_GET_CONSOLE_OUTPUT_SCHEMA
),
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -271,7 +275,9 @@ class NovaGenerator(OpenStackServerSourceBase):
elif name == "ServersIpShowResponse":
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(maxProperties=1, **nova_schemas.SERVER_ADDRESSES_SCHEMA),
TypeSchema(
maxProperties=1, **nova_schemas.SERVER_ADDRESSES_SCHEMA
),
)
ref = f"#/components/schemas/{name}"
# /servers/id/metadata
@ -299,7 +305,9 @@ class NovaGenerator(OpenStackServerSourceBase):
elif name == "ServersOs_Instance_ActionShowResponse":
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**nova_schemas.SERVER_INSTANCE_ACTION_CONTAINER_SCHEMA),
TypeSchema(
**nova_schemas.SERVER_INSTANCE_ACTION_CONTAINER_SCHEMA
),
)
ref = f"#/components/schemas/{name}"
# /server/id/os-interface-attachment
@ -315,7 +323,9 @@ class NovaGenerator(OpenStackServerSourceBase):
]:
schema = openapi_spec.components.schemas.setdefault(
name,
TypeSchema(**nova_schemas.INTERFACE_ATTACHMENT_CONTAINER_SCHEMA),
TypeSchema(
**nova_schemas.INTERFACE_ATTACHMENT_CONTAINER_SCHEMA
),
)
ref = f"#/components/schemas/{name}"
# /server/id/os-server-password
@ -639,8 +649,12 @@ class NovaGenerator(OpenStackServerSourceBase):
"""Hook to allow service specific generator to modify details"""
if operation_spec.operationId == "servers/id/action:post":
# Several server actions may return Location header
operation_spec.responses.setdefault("202", {"description": "Accepted"})
headers_202 = operation_spec.responses["202"].setdefault("headers", {})
operation_spec.responses.setdefault(
"202", {"description": "Accepted"}
)
headers_202 = operation_spec.responses["202"].setdefault(
"headers", {}
)
headers_202.setdefault(
"Location",
HeaderSchema(
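
A dict-based sketch of this 202/Location post-processing, with a plain dict standing in for the HeaderSchema object so the snippet is self-contained; the header description text is an assumption for the example.

def ensure_location_header(responses: dict) -> None:
    # Same setdefault chain as the hook above: create the 202 response if
    # missing, then attach the Location header exactly once.
    rsp_202 = responses.setdefault("202", {"description": "Accepted"})
    headers_202 = rsp_202.setdefault("headers", {})
    headers_202.setdefault(
        "Location",
        {
            "description": "Location of the resource the action produced",
            "schema": {"type": "string"},
        },
    )


responses: dict = {}
ensure_location_header(responses)
# responses["202"]["headers"]["Location"] is now present.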

View File

@ -44,9 +44,7 @@ SERVER_TAGS_SCHEMA: dict[str, Any] = {
"tags": {
"type": "array",
"description": "A list of tags. The maximum count of tags in this list is 50.",
"items": {
"type": "string",
},
"items": {"type": "string"},
}
},
}
@ -65,9 +63,7 @@ SERVER_TOPOLOGY_SCHEMA: dict[str, Any] = {
"cpu_pinning": {
"type": "object",
"description": "The mapping of server cores to host physical CPU",
"additionalProperties": {
"type": "integer",
},
"additionalProperties": {"type": "integer"},
},
"vcpu_set": {
"type": "array",
@ -192,7 +188,9 @@ FLAVORS_LIST_SCHEMA: dict[str, Any] = {
FLAVORS_LIST_DETAIL_SCHEMA: dict[str, Any] = {
"description": "Detailed flavors list response",
"type": "object",
"properties": {"flavors": {"type": "array", "items": copy.deepcopy(FLAVOR_SCHEMA)}},
"properties": {
"flavors": {"type": "array", "items": copy.deepcopy(FLAVOR_SCHEMA)}
},
}
FLAVOR_ACCESS_SCHEMA: dict[str, Any] = {
@ -270,7 +268,7 @@ LIMITS_SCHEMA: dict[str, Any] = {
},
},
"additionalProperties": {"type": "integer"},
},
}
},
}
},
@ -467,16 +465,16 @@ REMOTE_CONSOLE_SCHEMA: dict[str, Any] = {
"properties": {
"protocol": {
"type": "string",
"enum": remote_consoles.create_v28["properties"]["remote_console"][
"properties"
]["protocol"]["enum"],
"enum": remote_consoles.create_v28["properties"][
"remote_console"
]["properties"]["protocol"]["enum"],
"description": "The protocol of remote console. The valid values are vnc, spice, rdp, serial and mks. The protocol mks is added since Microversion 2.8.",
},
"type": {
"type": "string",
"enum": remote_consoles.create_v28["properties"]["remote_console"][
"properties"
]["type"]["enum"],
"enum": remote_consoles.create_v28["properties"][
"remote_console"
]["properties"]["type"]["enum"],
"description": "The type of remote console. The valid values are novnc, rdp-html5, spice-html5, serial, and webmks. The type webmks is added since Microversion 2.8.",
},
"url": {
@ -786,9 +784,7 @@ KEYPAIR_LIST_SCHEMA: dict[str, Any] = {
"description": "Array of Keypair objects",
"items": {
"type": "object",
"properties": {
"keypair": copy.deepcopy(KEYPAIR_SHORT_SCHEMA),
},
"properties": {"keypair": copy.deepcopy(KEYPAIR_SHORT_SCHEMA)},
},
},
"keypairs_links": copy.deepcopy(LINKS_SCHEMA),
@ -833,8 +829,12 @@ KEYPAIR_CONTAINER_SCHEMA: dict[str, Any] = {
"properties": {"keypair": KEYPAIR_SCHEMA},
}
KEYPAIR_CREATED_SCHEMA: dict[str, Any] = copy.deepcopy(KEYPAIR_CONTAINER_SCHEMA)
KEYPAIR_CREATED_SCHEMA["properties"]["keypair"]["properties"]["private_key"] = {
KEYPAIR_CREATED_SCHEMA: dict[str, Any] = copy.deepcopy(
KEYPAIR_CONTAINER_SCHEMA
)
KEYPAIR_CREATED_SCHEMA["properties"]["keypair"]["properties"][
"private_key"
] = {
"type": "string",
"description": "If you do not provide a public key on create, a new keypair will be built for you, and the private key will be returned during the initial create call. Make sure to save this, as there is no way to get this private key again in the future.",
"x-openstack": {"max-ver": "2.91"},
@ -1031,7 +1031,7 @@ SERVER_MIGRATION_LIST_SCHEMA: dict[str, Any] = {
"migrations": {
"type": "array",
"items": copy.deepcopy(SERVER_MIGRATION_SCHEMA),
},
}
},
}
SERVER_MIGRATION_CONTAINER_SCHEMA: dict[str, Any] = {
@ -1272,7 +1272,7 @@ SERVER_GROUP_SCHEMA: dict[str, Any] = {
"type": "object",
"description": "The rules field, which is a dict, can be applied to the policy. Currently, only the max_server_per_host rule is supported for the anti-affinity policy. The max_server_per_host rule allows specifying how many members of the anti-affinity group can reside on the same compute host. If not specified, only one member from the same anti-affinity group can reside on a given host.",
"properties": {
"max_server_per_host": parameter_types.positive_integer,
"max_server_per_host": parameter_types.positive_integer
},
"additionalProperties": False,
"x-openstack": {"min-ver": "2.64"},
@ -2267,7 +2267,7 @@ SERVER_SECURITY_GROUPS_LIST_SCHEMA: dict[str, Any] = {
},
},
},
},
}
},
}

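KEYPAIR_CREATED_SCHEMA above is produced by deep-copying the container schema and grafting an extra property onto the copy, so the shared base definition stays untouched. A small sketch of that pattern with made-up schemas:

import copy
from typing import Any

KEYPAIR_CONTAINER_EXAMPLE: dict[str, Any] = {
    "type": "object",
    "properties": {
        "keypair": {
            "type": "object",
            "properties": {"name": {"type": "string"}},
        }
    },
}

# Derive the "created" variant without mutating the shared base definition.
KEYPAIR_CREATED_EXAMPLE: dict[str, Any] = copy.deepcopy(KEYPAIR_CONTAINER_EXAMPLE)
KEYPAIR_CREATED_EXAMPLE["properties"]["keypair"]["properties"]["private_key"] = {
    "type": "string",
    "x-openstack": {"max-ver": "2.91"},
}

assert "private_key" not in (
    KEYPAIR_CONTAINER_EXAMPLE["properties"]["keypair"]["properties"]
)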

@ -179,24 +179,24 @@ class OctaviaGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(Path(impl_path))
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Load Balancing API",
description=LiteralScalarString(
info={
"title": "OpenStack Load Balancing API",
"description": LiteralScalarString(
"Load Balancing API provided by Octavia service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
),
}
},
)
config.register_cli_opts()
@ -385,7 +385,9 @@ class OctaviaGenerator(OpenStackServerSourceBase):
self._process_route(route, openapi_spec, framework="pecan")
if args.api_ref_src:
merge_api_ref_doc(openapi_spec, args.api_ref_src, allow_strip_version=False)
merge_api_ref_doc(
openapi_spec, args.api_ref_src, allow_strip_version=False
)
self.dump_openapi(openapi_spec, Path(impl_path), args.validate)


@ -15,17 +15,13 @@ from pathlib import Path
from ruamel.yaml.scalarstring import LiteralScalarString
from codegenerator.common.schema import (
SpecSchema,
)
from codegenerator.common.schema import SpecSchema
from codegenerator.openapi.base import OpenStackServerSourceBase
from codegenerator.openapi.utils import merge_api_ref_doc
class PlacementGenerator(OpenStackServerSourceBase):
URL_TAG_MAP = {
"/versions": "version",
}
URL_TAG_MAP = {"/versions": "version"}
def _api_ver_major(self, ver):
return ver.ver_major
@ -65,24 +61,24 @@ class PlacementGenerator(OpenStackServerSourceBase):
openapi_spec = self.load_openapi(impl_path)
if not openapi_spec:
openapi_spec = SpecSchema(
info=dict(
title="OpenStack Placement API",
description=LiteralScalarString(
info={
"title": "OpenStack Placement API",
"description": LiteralScalarString(
"Placement API provided by Placement service"
),
version=self.api_version,
),
"version": self.api_version,
},
openapi="3.1.0",
security=[{"ApiKeyAuth": []}],
components=dict(
securitySchemes={
components={
"securitySchemes": {
"ApiKeyAuth": {
"type": "apiKey",
"in": "header",
"name": "X-Auth-Token",
}
},
),
}
},
)
for route in self.router.matchlist:
@ -92,9 +88,7 @@ class PlacementGenerator(OpenStackServerSourceBase):
if args.api_ref_src:
merge_api_ref_doc(
openapi_spec,
args.api_ref_src,
allow_strip_version=False,
openapi_spec, args.api_ref_src, allow_strip_version=False
)
self.dump_openapi(openapi_spec, impl_path, args.validate)

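The Nova, Octavia and Placement generators above all bootstrap the same spec skeleton when no prior OpenAPI file exists: info metadata plus an X-Auth-Token apiKey security scheme. A plain-dict sketch of that skeleton (the real code wraps it in SpecSchema and LiteralScalarString, and the version string here is made up):

spec: dict = {
    "openapi": "3.1.0",
    "info": {
        "title": "OpenStack Load Balancing API",
        "description": "Load Balancing API provided by Octavia service",
        "version": "2.27",  # hypothetical api_version value
    },
    "security": [{"ApiKeyAuth": []}],
    "components": {
        "securitySchemes": {
            "ApiKeyAuth": {
                "type": "apiKey",
                "in": "header",
                "name": "X-Auth-Token",
            }
        }
    },
    "paths": {},
}

assert spec["components"]["securitySchemes"]["ApiKeyAuth"]["name"] == "X-Auth-Token"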

@ -40,7 +40,7 @@ def merge_api_ref_doc(
processed_operations: set[str] = set()
# Iterate over api-ref docs
for api_ref_doc in api_ref_src:
with open(api_ref_doc, "r") as fp:
with open(api_ref_doc) as fp:
html_doc = fp.read()
# openapi_spec = jsonref.replace_refs(openapi_spec)
@ -71,7 +71,9 @@ def merge_api_ref_doc(
# TODO(gtema): notes sit beside the main "p" and not
# underneath it
# Iterate over URLs
operation_url_containers = section.find_all("div", class_="operation-grp")
operation_url_containers = section.find_all(
"div", class_="operation-grp"
)
for op in operation_url_containers:
ep = op.find("div", class_="endpoint-container")
ep_divs = ep.find_all("div")
@ -108,7 +110,9 @@ def merge_api_ref_doc(
# Paths have different length. Skip
continue
is_search_aborted = False
for source, doc in zip(existing_path_parts, doc_url_parts):
for source, doc in zip(
existing_path_parts, doc_url_parts
):
source_ = source.strip("{}")
doc_ = doc.strip("{}")
if (
@ -147,18 +151,19 @@ def merge_api_ref_doc(
break
if not path_spec:
logging.info("Cannot find path %s in the spec" % url)
logging.info(f"Cannot find path {url} in the spec")
continue
op_spec = getattr(path_spec, method.lower(), None)
if not op_spec:
logging.warn(
"Cannot find %s operation for %s in the spec" % (method, url)
f"Cannot find {method} operation for {url} in the spec"
)
continue
if op_spec.operationId in processed_operations and not url.endswith(
"/action"
if (
op_spec.operationId in processed_operations
and not url.endswith("/action")
):
# Do not update operation we have already processed
continue
@ -169,7 +174,9 @@ def merge_api_ref_doc(
# details section
details_button = op.find("button")
details_section_id = details_button["data-target"].strip("#")
details_section = section.find("section", id=details_section_id)
details_section = section.find(
"section", id=details_section_id
)
description = []
action_name = None
# Gather description section paragraphs to construct operation description
@ -179,9 +186,11 @@ def merge_api_ref_doc(
elif details_child.name == "section":
if (
details_child.h3 and "Request" in details_child.h3.strings
details_child.h3
and "Request" in details_child.h3.strings
) or (
details_child.h4 and "Request" in details_child.h4.strings
details_child.h4
and "Request" in details_child.h4.strings
):
# Found request details
if not details_child.table:
@ -210,13 +219,15 @@ def merge_api_ref_doc(
method,
)
continue
(schema_specs, action_name) = _get_schema_candidates(
openapi_spec,
url,
spec_body,
action_name,
summary,
description,
(schema_specs, action_name) = (
_get_schema_candidates(
openapi_spec,
url,
spec_body,
action_name,
summary,
description,
)
)
_doc_process_operation_table(
@ -235,13 +246,15 @@ def merge_api_ref_doc(
details_child.h3
and (
"Response" in details_child.h3.strings
or "Response Parameters" in details_child.h3.strings
or "Response Parameters"
in details_child.h3.strings
)
) or (
details_child.h4
and (
"Response" in details_child.h4.strings
or "Response Parameters" in details_child.h4.strings
or "Response Parameters"
in details_child.h4.strings
)
):
# Found response details
@ -275,8 +288,10 @@ def merge_api_ref_doc(
op_spec.operationId,
)
continue
(schema_specs, action_name) = _get_schema_candidates(
openapi_spec, url, spec_body, action_name
(schema_specs, action_name) = (
_get_schema_candidates(
openapi_spec, url, spec_body, action_name
)
)
try:
_doc_process_operation_table(
@ -301,11 +316,7 @@ def merge_api_ref_doc(
def _doc_process_operation_table(
tbody,
openapi_spec,
op_spec,
schema_specs,
doc_source_param_mapping,
tbody, openapi_spec, op_spec, schema_specs, doc_source_param_mapping
):
"""Process DOC table (Request/Reseponse) and try to set description to
the matching schema property"""
@ -335,7 +346,9 @@ def _doc_process_operation_table(
param_def.location == doc_param_location
and param_def.name == doc_param_name
):
param_def.description = LiteralScalarString(doc_param_descr)
param_def.description = LiteralScalarString(
doc_param_descr
)
elif doc_param_location == "body":
# Body param. Traverse through body information
for schema in schema_specs:
@ -362,7 +375,9 @@ def _find_schema_property(schema, target_prop_name):
return
for prop_name, prop_def in props.items():
prop_type = (
prop_def.get("type") if isinstance(prop_def, dict) else prop_def.type
prop_def.get("type")
if isinstance(prop_def, dict)
else prop_def.type
)
if prop_name == target_prop_name:
return prop_def
@ -397,7 +412,9 @@ def _find_schema_property(schema, target_prop_name):
elif xtype == "array":
items_schema = (
schema.items if isinstance(schema, TypeSchema) else schema.get("items")
schema.items
if isinstance(schema, TypeSchema)
else schema.get("items")
)
candidate = _find_schema_property(items_schema, target_prop_name)
if candidate:
@ -434,7 +451,9 @@ def _get_schema_candidates(
ref = spec_body.get("$ref")
oneOf = spec_body.get("oneOf")
if spec_body and ref:
candidate_schema = openapi_spec.components.schemas.get(ref.split("/")[-1])
candidate_schema = openapi_spec.components.schemas.get(
ref.split("/")[-1]
)
if candidate_schema.oneOf:
for x in candidate_schema.oneOf:
ref = x.get("$ref") if isinstance(x, dict) else x.ref
@ -476,7 +495,9 @@ def _get_schema_candidates(
elif not action_name and section_description:
if candidate_action_name and (
re.search(rf"\b{candidate_action_name}\b", section_summary)
re.search(
rf"\b{candidate_action_name}\b", section_summary
)
or (
url.endswith("/volumes/{volume_id}/action")
# Cinder doc does not contain action name in the
@ -496,7 +517,9 @@ def _get_schema_candidates(
itms = res.get("oneOf")
if itms:
for itm in itms:
schema_specs.append(get_schema(openapi_spec, itm))
schema_specs.append(
get_schema(openapi_spec, itm)
)
schema_specs.append(res)
# Set the action name. Since
# Request normally comes before

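The merge logic above walks the rendered api-ref HTML: each operation lives in a div with class "operation-grp", and its "endpoint-container" div is where the method and URL are looked up. A self-contained sketch of that lookup using bs4 and an invented HTML fragment in which the endpoint container carries the method and URL as child divs; the real pages also hold the summary, description and parameter tables that get merged into the spec:

from bs4 import BeautifulSoup

html = """
<section>
  <div class="operation-grp">
    <div class="endpoint-container">
      <div>GET</div>
      <div>/servers/{server_id}</div>
    </div>
  </div>
</section>
"""

soup = BeautifulSoup(html, "html.parser")
for op in soup.find_all("div", class_="operation-grp"):
    ep = op.find("div", class_="endpoint-container")
    # The endpoint container holds the HTTP method and the URL as child divs.
    method, url = (d.get_text(strip=True) for d in ep.find_all("div"))
    print(method, url)  # GET /servers/{server_id}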

@ -72,7 +72,7 @@ class OpenApiSchemaGenerator(BaseGenerator):
self, res, target_dir, openapi_spec=None, operation_id=None, args=None
):
"""Generate Schema definition file for Resource"""
logging.debug("Generating OpenAPI schema data in %s" % target_dir)
logging.debug(f"Generating OpenAPI schema data in {target_dir}")
# We do not import all generators up front since, due to the use of
# Singletons in the code, importing glance, nova and cinder at the same
# time crashes dramatically
@ -93,4 +93,6 @@ class OpenApiSchemaGenerator(BaseGenerator):
elif args.service_type == "shared-file-system":
self.generate_manila(target_dir, args)
else:
raise RuntimeError("Service type %s is not supported", args.service_type)
raise RuntimeError(
f"Service type {args.service_type} is not supported"
)


@ -42,21 +42,23 @@ class OSCGenerator(BaseGenerator):
def generate(self, res, target_dir, args=None):
"""Generate code for the OpenStackClient"""
logging.debug("Generating OpenStackClient code in %s" % target_dir)
logging.debug(f"Generating OpenStackClient code in {target_dir}")
osc_path = res.mod_name.split(".")[1:]
context = dict(
res=res.resource_class,
sdk_mod_name=res.mod_name,
osc_mod_name=res.mod_name.replace("openstack.", "openstackclient."),
class_name=res.class_name,
resource_name=res.class_name.lower(),
sdk_service_name=res.service_name,
proxy=res.proxy_obj,
fqcn=res.fqcn,
registry_name=res.registry_name,
attrs=res.attrs,
)
context = {
"res": res.resource_class,
"sdk_mod_name": res.mod_name,
"osc_mod_name": res.mod_name.replace(
"openstack.", "openstackclient."
),
"class_name": res.class_name,
"resource_name": res.class_name.lower(),
"sdk_service_name": res.service_name,
"proxy": res.proxy_obj,
"fqcn": res.fqcn,
"registry_name": res.registry_name,
"attrs": res.attrs,
}
work_dir = Path(target_dir)
work_dir.mkdir(parents=True, exist_ok=True)
@ -78,7 +80,9 @@ class OSCGenerator(BaseGenerator):
context,
osc_path,
"osc/impl_list.py.j2",
Path(work_dir, "openstackclient", "/".join(osc_path), "list.py"),
Path(
work_dir, "openstackclient", "/".join(osc_path), "list.py"
),
"osc/test_unit_list.py.j2",
Path(
work_dir,
@ -96,7 +100,9 @@ class OSCGenerator(BaseGenerator):
context,
osc_path,
"osc/impl_show.py.j2",
Path(work_dir, "openstackclient", "/".join(osc_path), "show.py"),
Path(
work_dir, "openstackclient", "/".join(osc_path), "show.py"
),
"osc/test_unit_show.py.j2",
Path(
work_dir,
@ -161,10 +167,7 @@ class OSCGenerator(BaseGenerator):
osc_path,
"osc/impl_set.py.j2",
Path(
work_dir,
"openstackclient",
"/".join(osc_path),
"set.py",
work_dir, "openstackclient", "/".join(osc_path), "set.py"
),
"osc/test_unit_set.py.j2",
Path(
@ -183,10 +186,7 @@ class OSCGenerator(BaseGenerator):
osc_path,
"osc/impl_unset.py.j2",
Path(
work_dir,
"openstackclient",
"/".join(osc_path),
"unset.py",
work_dir, "openstackclient", "/".join(osc_path), "unset.py"
),
"osc/test_unit_unset.py.j2",
Path(

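Each render call above pairs a Jinja template name with the context dictionary built at the top of generate() and writes the result under an openstackclient module path. A rough sketch of that flow using an inline template; the DictLoader, template text, context values and output paths are all made up, since the project loads its *.j2 templates from the package instead:

from pathlib import Path

from jinja2 import DictLoader, Environment

env = Environment(
    loader=DictLoader(
        {
            "osc/impl_list.py.j2": (
                "class List{{ class_name }}:\n"
                "    resource = '{{ resource_name }}'\n"
            )
        }
    )
)

context = {"class_name": "Flavor", "resource_name": "flavor"}
content = env.get_template("osc/impl_list.py.j2").render(**context)

dest = Path("out", "openstackclient", "compute", "v2", "flavor")
dest.mkdir(parents=True, exist_ok=True)
(dest / "list.py").write_text(content)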

@ -41,7 +41,7 @@ class BooleanFlag(common_rust.Boolean):
"""Boolean parameter that is represented as a CLI flag"""
type_hint: str = "bool"
clap_macros: set[str] = set(["action=clap::ArgAction::SetTrue"])
clap_macros: set[str] = {"action=clap::ArgAction::SetTrue"}
original_data_type: BaseCompoundType | BasePrimitiveType | None = None
@ -58,14 +58,14 @@ class String(common_rust.String):
@property
def imports(self) -> set[str]:
if self.format and self.format == "password":
return set(["dialoguer::Password"])
return set([])
return {"dialoguer::Password"}
return set()
class IntString(common.BasePrimitiveType):
"""CLI Integer or String"""
imports: set[str] = set(["crate::common::IntString"])
imports: set[str] = {"crate::common::IntString"}
type_hint: str = "IntString"
clap_macros: set[str] = set()
@ -73,7 +73,7 @@ class IntString(common.BasePrimitiveType):
class NumString(common.BasePrimitiveType):
"""CLI Number or String"""
imports: set[str] = set(["crate::common::NumString"])
imports: set[str] = {"crate::common::NumString"}
type_hint: str = "NumString"
clap_macros: set[str] = set()
@ -81,7 +81,7 @@ class NumString(common.BasePrimitiveType):
class BoolString(common.BasePrimitiveType):
"""CLI Boolean or String"""
imports: set[str] = set(["crate::common::BoolString"])
imports: set[str] = {"crate::common::BoolString"}
type_hint: str = "BoolString"
clap_macros: set[str] = set()
@ -89,7 +89,7 @@ class BoolString(common.BasePrimitiveType):
class VecString(common.BasePrimitiveType):
"""CLI Vector of strings"""
imports: set[str] = set(["crate::common::VecString"])
imports: set[str] = {"crate::common::VecString"}
type_hint: str = "VecString"
clap_macros: set[str] = set()
@ -97,12 +97,12 @@ class VecString(common.BasePrimitiveType):
class JsonValue(common_rust.JsonValue):
"""Arbitrary JSON value"""
clap_macros: set[str] = set(['value_name="JSON"', "value_parser=parse_json"])
clap_macros: set[str] = {'value_name="JSON"', "value_parser=parse_json"}
original_data_type: BaseCombinedType | BaseCompoundType | None = None
@property
def imports(self):
imports: set[str] = set(["crate::common::parse_json", "serde_json::Value"])
imports: set[str] = {"crate::common::parse_json", "serde_json::Value"}
if self.original_data_type and isinstance(
self.original_data_type, common_rust.Dictionary
):
@ -130,7 +130,7 @@ class StructInputField(common_rust.StructField):
@property
def builder_macros(self):
macros: set[str] = set([])
macros: set[str] = set()
if not isinstance(self.data_type, BaseCompoundType):
macros.update(self.data_type.builder_macros)
else:
@ -141,7 +141,7 @@ class StructInputField(common_rust.StructField):
@property
def serde_macros(self):
macros = set([])
macros = set()
if self.local_name != self.remote_name:
macros.add(f'rename="{self.remote_name}"')
return f"#[serde({', '.join(sorted(macros))})]"
@ -156,7 +156,7 @@ class StructInputField(common_rust.StructField):
self.data_type.item_type, common_rust.Struct
):
return "#[command(flatten)]"
macros = set(["long"])
macros = {"long"}
try:
if self.data_type.clap_macros:
macros.update(self.data_type.clap_macros)
@ -173,7 +173,7 @@ class StructInputField(common_rust.StructField):
# For substructs (and maybe enums) we tell Clap to flatten the subtype
# instead of exposing the attr itself
return "#[command(flatten)]"
macros = set(["long"])
macros = {"long"}
if is_group and not self.is_optional:
macros.add("required=false")
try:
@ -196,7 +196,7 @@ class StructInput(common_rust.Struct):
@property
def imports(self):
imports: set[str] = set(["serde::Deserialize"])
imports: set[str] = {"serde::Deserialize"}
for field in self.fields.values():
imports.update(field.data_type.imports)
if self.additional_fields_type:
@ -214,7 +214,9 @@ class EnumGroupStructInputField(StructInputField):
class EnumGroupStruct(common_rust.Struct):
"""Container for complex Enum containing Array"""
field_type_class_: Type[common_rust.StructField] = EnumGroupStructInputField
field_type_class_: Type[common_rust.StructField] = (
EnumGroupStructInputField
)
base_type: str = "struct"
sdk_enum_name: str
is_group: bool = True
@ -234,7 +236,7 @@ class StructFieldResponse(common_rust.StructField):
@property
def serde_macros(self):
macros = set([])
macros = set()
if self.local_name != self.remote_name:
macros.add(f'rename="{self.remote_name}"')
return f"#[serde({', '.join(sorted(macros))})]"
@ -246,23 +248,29 @@ class StructFieldResponse(common_rust.StructField):
resource_name: str,
operation_type: str,
):
macros = set([])
macros = set()
if self.is_optional:
macros.add("optional")
if self.local_name != self.remote_name:
macros.add(f'title="{self.remote_name}"')
# Fully Qualified Attribute Name
fqan: str = ".".join([service_name, resource_name, self.remote_name]).lower()
fqan: str = ".".join(
[service_name, resource_name, self.remote_name]
).lower()
# Check the known alias of the field by FQAN
alias = common.FQAN_ALIAS_MAP.get(fqan)
if operation_type in ["list", "list_from_struct"]:
if (
"id" in struct.fields.keys()
and not (self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS)
and not (
self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS
)
) or (
"id" not in struct.fields.keys()
and (self.local_name not in list(struct.fields.keys())[-10:])
and not (self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS)
and not (
self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS
)
):
# Only add "wide" flag if field is not in the basic fields AND
# there is at least "id" field existing in the struct OR the
@ -283,7 +291,7 @@ class StructResponse(common_rust.Struct):
@property
def imports(self):
imports: set[str] = set(["serde::Deserialize"])
imports: set[str] = {"serde::Deserialize"}
for field in self.fields.values():
imports.update(field.data_type.imports)
# In difference to the SDK and Input we do not currently handle
@ -302,7 +310,7 @@ class TupleStruct(common_rust.Struct):
@property
def imports(self):
imports: set[str] = set([])
imports: set[str] = set()
for field in self.tuple_fields:
imports.update(field.data_type.imports)
return imports
@ -318,7 +326,7 @@ class DictionaryInput(common_rust.Dictionary):
@property
def imports(self):
imports = set([])
imports = set()
if not isinstance(self.value_type, common_rust.Option):
imports.add("crate::common::parse_key_val")
else:
@ -328,26 +336,21 @@ class DictionaryInput(common_rust.Dictionary):
@property
def clap_macros(self):
macros = set(
[
"long",
'value_name="key=value"',
]
)
macros = {"long", 'value_name="key=value"'}
if not isinstance(self.value_type, common_rust.Option):
macros.add(
f"value_parser=parse_key_val::<String, {self.value_type.type_hint}>",
f"value_parser=parse_key_val::<String, {self.value_type.type_hint}>"
)
else:
macros.add(
f"value_parser=parse_key_val_opt::<String, {self.value_type.item_type.type_hint}>",
f"value_parser=parse_key_val_opt::<String, {self.value_type.item_type.type_hint}>"
)
return macros
class StringEnum(common_rust.StringEnum):
imports: set[str] = set(["clap::ValueEnum"])
imports: set[str] = {"clap::ValueEnum"}
class ArrayInput(common_rust.Array):
@ -360,7 +363,7 @@ class ArrayInput(common_rust.Array):
@property
def clap_macros(self):
macros: set[str] = set(["long", "action=clap::ArgAction::Append"])
macros: set[str] = {"long", "action=clap::ArgAction::Append"}
macros.update(self.item_type.clap_macros)
return macros
@ -420,7 +423,9 @@ class RequestParameter(common_rust.RequestParameter):
class RequestTypeManager(common_rust.TypeManager):
primitive_type_mapping: dict[Type[model.PrimitiveType], Type[BasePrimitiveType]] = {
primitive_type_mapping: dict[
Type[model.PrimitiveType], Type[BasePrimitiveType]
] = {
model.PrimitiveString: String,
model.ConstraintString: String,
model.PrimitiveAny: JsonValue,
@ -438,13 +443,17 @@ class RequestTypeManager(common_rust.TypeManager):
model.Set: ArrayInput,
}
request_parameter_class: Type[common_rust.RequestParameter] = RequestParameter
request_parameter_class: Type[common_rust.RequestParameter] = (
RequestParameter
)
string_enum_class = StringEnum
def get_local_attribute_name(self, name: str) -> str:
"""Get localized attribute name"""
name = name.replace(".", "_")
attr_name = "_".join(x.lower() for x in re.split(common.SPLIT_NAME_RE, name))
attr_name = "_".join(
x.lower() for x in re.split(common.SPLIT_NAME_RE, name)
)
if attr_name in ["type", "self", "enum", "ref", "default"]:
attr_name = f"_{attr_name}"
return attr_name
@ -469,7 +478,9 @@ class RequestTypeManager(common_rust.TypeManager):
# Field is of Enum type.
if isinstance(result, common_rust.Enum):
variant_classes = [x.data_type.__class__ for x in result.kinds.values()]
variant_classes = [
x.data_type.__class__ for x in result.kinds.values()
]
if (
StringEnum in variant_classes
@ -522,12 +533,13 @@ class RequestTypeManager(common_rust.TypeManager):
return result
def convert_model(
self,
type_model: model.PrimitiveType | model.ADT | model.Reference,
self, type_model: model.PrimitiveType | model.ADT | model.Reference
) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType:
"""Get local destination type from the ModelType"""
model_ref: model.Reference | None = None
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = None
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = (
None
)
if isinstance(type_model, model.Reference):
model_ref = type_model
@ -557,7 +569,10 @@ class RequestTypeManager(common_rust.TypeManager):
else:
item_type = type_model.item_type
if isinstance(item_type, model.Struct) and len(item_type.fields.keys()) > 1:
if (
isinstance(item_type, model.Struct)
and len(item_type.fields.keys()) > 1
):
# An array of structs with more than 1 field
# Array of Structs can not be handled by the CLI (input).
# Therefore handle the underlying structure as Json saving
@ -592,7 +607,9 @@ class RequestTypeManager(common_rust.TypeManager):
):
original_data_type = self.convert_model(type_model.value_type)
typ = JsonValue(
original_data_type=DictionaryInput(value_type=original_data_type)
original_data_type=DictionaryInput(
value_type=original_data_type
)
)
if typ:
@ -608,7 +625,9 @@ class RequestTypeManager(common_rust.TypeManager):
struct_class = self.data_type_mapping[model.Struct]
mod = struct_class(
name=self.get_model_name(type_model.reference),
description=common_rust.sanitize_rust_docstrings(type_model.description),
description=common_rust.sanitize_rust_docstrings(
type_model.description
),
)
field_class = mod.field_type_class_
for field_name, field in type_model.fields.items():
@ -637,7 +656,9 @@ class RequestTypeManager(common_rust.TypeManager):
)
and not (
# and not Option<Primitive>
isinstance(field_data_type.value_type, self.option_type_class)
isinstance(
field_data_type.value_type, self.option_type_class
)
and isinstance(
field_data_type.value_type.item_type,
common_rust.BasePrimitiveType,
@ -646,9 +667,13 @@ class RequestTypeManager(common_rust.TypeManager):
):
dict_type_model = self._get_adt_by_reference(field.data_type)
simplified_data_type = JsonValue()
simplified_data_type.original_data_type = field_data_type.value_type
simplified_data_type.original_data_type = (
field_data_type.value_type
)
field_data_type.value_type = simplified_data_type
self.ignored_models.append(dict_type_model.value_type.reference)
self.ignored_models.append(
dict_type_model.value_type.reference
)
elif isinstance(field_data_type, StructInput):
# Check if one of the sub fields has the same attribute name as in the current struct.
# Ideally this should never happen, but e.g. image.namespace.property is such a case
@ -666,12 +691,16 @@ class RequestTypeManager(common_rust.TypeManager):
f = field_class(
local_name=self.get_local_attribute_name(field_name),
remote_name=self.get_remote_attribute_name(field_name),
description=common_rust.sanitize_rust_docstrings(field.description),
description=common_rust.sanitize_rust_docstrings(
field.description
),
data_type=field_data_type,
is_optional=not field.is_required,
is_nullable=is_nullable,
)
if mod.name != "Request" and isinstance(field_data_type, struct_class):
if mod.name != "Request" and isinstance(
field_data_type, struct_class
):
field_data_type.is_group = True
field_data_type.is_required = field.is_required
if isinstance(field_data_type, self.option_type_class):
@ -743,7 +772,9 @@ class RequestTypeManager(common_rust.TypeManager):
class ResponseTypeManager(common_rust.TypeManager):
primitive_type_mapping: dict[Type[model.PrimitiveType], Type[BasePrimitiveType]] = {
primitive_type_mapping: dict[
Type[model.PrimitiveType], Type[BasePrimitiveType]
] = {
model.PrimitiveString: common_rust.String,
model.ConstraintString: common_rust.String,
}
@ -764,16 +795,18 @@ class ResponseTypeManager(common_rust.TypeManager):
if not model_ref:
return "Response"
return "Response" + "".join(
x.capitalize() for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
x.capitalize()
for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
)
def convert_model(
self,
type_model: model.PrimitiveType | model.ADT | model.Reference,
self, type_model: model.PrimitiveType | model.ADT | model.Reference
) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType:
"""Get local destination type from the ModelType"""
model_ref: model.Reference | None = None
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = None
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = (
None
)
if isinstance(type_model, model.Reference):
model_ref = type_model
type_model = self._get_adt_by_reference(model_ref)
@ -814,7 +847,9 @@ class ResponseTypeManager(common_rust.TypeManager):
# There is no sense of Enum in the output. Convert to the plain
# string
typ = String(
description=common_rust.sanitize_rust_docstrings(typ.description)
description=common_rust.sanitize_rust_docstrings(
typ.description
)
)
if (
typ
@ -829,18 +864,23 @@ class ResponseTypeManager(common_rust.TypeManager):
def _simplify_oneof_combinations(self, type_model, kinds):
"""Simplify certain known oneOf combinations"""
kinds_classes = [x["class"] for x in kinds]
if common_rust.String in kinds_classes and common_rust.Number in kinds_classes:
if (
common_rust.String in kinds_classes
and common_rust.Number in kinds_classes
):
# oneOf [string, number] => NumString
kinds.clear()
kinds.append({"local": NumString(), "class": NumString})
elif (
common_rust.String in kinds_classes and common_rust.Integer in kinds_classes
common_rust.String in kinds_classes
and common_rust.Integer in kinds_classes
):
# oneOf [string, integer] => NumString
kinds.clear()
kinds.append({"local": IntString(), "class": IntString})
elif (
common_rust.String in kinds_classes and common_rust.Boolean in kinds_classes
common_rust.String in kinds_classes
and common_rust.Boolean in kinds_classes
):
# oneOf [string, boolean] => String
kinds.clear()
@ -852,7 +892,9 @@ class ResponseTypeManager(common_rust.TypeManager):
struct_class = self.data_type_mapping[model.Struct]
mod = struct_class(
name=self.get_model_name(type_model.reference),
description=common_rust.sanitize_rust_docstrings(type_model.description),
description=common_rust.sanitize_rust_docstrings(
type_model.description
),
)
field_class = mod.field_type_class_
for field_name, field in type_model.fields.items():
@ -872,7 +914,9 @@ class ResponseTypeManager(common_rust.TypeManager):
f = field_class(
local_name=self.get_local_attribute_name(field_name),
remote_name=self.get_remote_attribute_name(field_name),
description=common_rust.sanitize_rust_docstrings(field.description),
description=common_rust.sanitize_rust_docstrings(
field.description
),
data_type=field_data_type,
is_optional=not field.is_required,
is_nullable=is_nullable,
@ -962,18 +1006,13 @@ class RustCliGenerator(BaseGenerator):
)
parser.add_argument(
"--tests",
action="store_true",
help="Generate tests",
"--tests", action="store_true", help="Generate tests"
)
return parser
def _render_command(
self,
context: dict,
impl_template: str,
impl_dest: Path,
self, context: dict, impl_template: str, impl_dest: Path
):
"""Render command code"""
self._render(impl_template, context, impl_dest.parent, impl_dest.name)
@ -983,7 +1022,7 @@ class RustCliGenerator(BaseGenerator):
):
"""Generate code for the Rust openstack_cli"""
logging.debug(
"Generating Rust CLI code for `%s` in %s" % (operation_id, target_dir)
f"Generating Rust CLI code for `{operation_id}` in {target_dir}"
)
work_dir = Path(target_dir, "rust", "openstack_cli", "src")
@ -992,21 +1031,19 @@ class RustCliGenerator(BaseGenerator):
if not operation_id:
operation_id = args.openapi_operation_id
(path, method, spec) = common.find_openapi_operation(openapi_spec, operation_id)
(path, method, spec) = common.find_openapi_operation(
openapi_spec, operation_id
)
_, res_name = res.split(".") if res else (None, None)
resource_name = common.get_resource_names_from_url(path)[-1]
openapi_parser = model.OpenAPISchemaParser()
operation_params: list[model.RequestParameter] = []
sdk_mod_path_base = common.get_rust_sdk_mod_path(
args.service_type,
args.api_version,
args.module_path or path,
args.service_type, args.api_version, args.module_path or path
)
cli_mod_path = common.get_rust_cli_mod_path(
args.service_type,
args.api_version,
args.module_path or path,
args.service_type, args.api_version, args.module_path or path
)
target_class_name = resource_name
is_image_download: bool = False
@ -1014,12 +1051,12 @@ class RustCliGenerator(BaseGenerator):
global_additional_imports: set[str] = set()
# Collect all operation parameters
for param in openapi_spec["paths"][path].get("parameters", []) + spec.get(
for param in openapi_spec["paths"][path].get(
"parameters", []
):
if (("{" + param["name"] + "}") in path and param["in"] == "path") or param[
"in"
] != "path":
) + spec.get("parameters", []):
if (
("{" + param["name"] + "}") in path and param["in"] == "path"
) or param["in"] != "path":
# Keep path params that actually appear in the URL, plus all non-path params
param_ = openapi_parser.parse_parameter(param)
if param_.name in [
@ -1032,19 +1069,28 @@ class RustCliGenerator(BaseGenerator):
if param_.resource_link:
link_res_name: str = param_.resource_link.split(".")[0]
global_additional_imports.add("tracing::warn")
global_additional_imports.add("openstack_sdk::api::find_by_name")
global_additional_imports.add("openstack_sdk::api::QueryAsync")
global_additional_imports.add(
"openstack_sdk::api::find_by_name"
)
global_additional_imports.add(
"openstack_sdk::api::QueryAsync"
)
global_additional_imports.add(
f"openstack_sdk::api::{'::'.join(link_res_name.split('/'))}::find as find_{link_res_name.split('/')[-1]}"
)
global_additional_imports.add("eyre::OptionExt")
# List of operation variants (based on the body)
operation_variants = common.get_operation_variants(spec, args.operation_name)
operation_variants = common.get_operation_variants(
spec, args.operation_name
)
body_types: list[str] = []
last_path_parameter: RequestParameter | None = None
if args.operation_type == "download" and path == "/v2/images/{image_id}/file":
if (
args.operation_type == "download"
and path == "/v2/images/{image_id}/file"
):
is_image_download = True
if args.operation_type == "upload":
@ -1054,10 +1100,12 @@ class RustCliGenerator(BaseGenerator):
body_types = list(content.keys())
for operation_variant in operation_variants:
logging.debug("Processing variant %s" % operation_variant)
logging.debug(f"Processing variant {operation_variant}")
additional_imports = set(global_additional_imports)
type_manager: common_rust.TypeManager = RequestTypeManager()
response_type_manager: common_rust.TypeManager = ResponseTypeManager()
response_type_manager: common_rust.TypeManager = (
ResponseTypeManager()
)
result_is_list: bool = False
is_list_paginated: bool = False
if operation_params:
@ -1127,14 +1175,20 @@ class RustCliGenerator(BaseGenerator):
response = common.find_response_schema(
spec["responses"],
args.response_key or resource_name,
(args.operation_name if args.operation_type == "action" else None),
(
args.operation_name
if args.operation_type == "action"
else None
),
)
if response:
response_key: str
if args.response_key:
response_key = (
args.response_key if args.response_key != "null" else None
args.response_key
if args.response_key != "null"
else None
)
else:
response_key = resource_name
@ -1149,7 +1203,9 @@ class RustCliGenerator(BaseGenerator):
isinstance(response_def.get("type"), list)
and "object" in response_def["type"]
):
(root, response_types) = openapi_parser.parse(response_def)
(root, response_types) = openapi_parser.parse(
response_def
)
if isinstance(root, model.Dictionary):
value_type: (
common_rust.BasePrimitiveType
@ -1158,8 +1214,10 @@ class RustCliGenerator(BaseGenerator):
| None
) = None
try:
value_type = response_type_manager.convert_model(
root.value_type
value_type = (
response_type_manager.convert_model(
root.value_type
)
)
except Exception:
# In rare cases we can not convert
@ -1170,13 +1228,19 @@ class RustCliGenerator(BaseGenerator):
value_type = JsonValue()
# if not isinstance(value_type, common_rust.BasePrimitiveType):
# value_type = JsonValue(original_data_type=value_type)
root_dict = HashMapResponse(value_type=value_type)
root_dict = HashMapResponse(
value_type=value_type
)
response_type_manager.refs[
model.Reference(name="Body", type=HashMapResponse)
model.Reference(
name="Body", type=HashMapResponse
)
] = root_dict
else:
response_type_manager.set_models(response_types)
response_type_manager.set_models(
response_types
)
if method == "patch" and not request_types:
# image patch is a jsonpatch based operation
@ -1200,11 +1264,15 @@ class RustCliGenerator(BaseGenerator):
elif response_def["type"] == "string":
(root_dt, _) = openapi_parser.parse(response_def)
if not root_dt:
raise RuntimeError("Response data can not be processed")
raise RuntimeError(
"Response data can not be processed"
)
field = common_rust.StructField(
local_name="dummy",
remote_name="dummy",
data_type=response_type_manager.convert_model(root_dt),
data_type=response_type_manager.convert_model(
root_dt
),
is_optional=False,
)
tuple_struct = TupleStruct(name="Response")
@ -1213,7 +1281,8 @@ class RustCliGenerator(BaseGenerator):
model.Reference(name="Body", type=TupleStruct)
] = tuple_struct
elif (
response_def["type"] == "array" and "items" in response_def
response_def["type"] == "array"
and "items" in response_def
):
(_, response_types) = openapi_parser.parse(
response_def["items"]
@ -1223,9 +1292,9 @@ class RustCliGenerator(BaseGenerator):
response_props = response.get("properties", {})
if (
response_props
and response_props[list(response_props.keys())[0]].get(
"type"
)
and response_props[
list(response_props.keys())[0]
].get("type")
== "array"
):
result_is_list = True
@ -1238,11 +1307,7 @@ class RustCliGenerator(BaseGenerator):
if not (
args.find_implemented_by_sdk
and args.operation_type
in [
"show",
"download",
]
and args.operation_type in ["show", "download"]
):
additional_imports.add(mod_import_name)
@ -1267,7 +1332,9 @@ class RustCliGenerator(BaseGenerator):
if args.operation_type == "list":
# Make plural form for listing
target_class_name = common.get_plural_form(target_class_name)
target_class_name = common.get_plural_form(
target_class_name
)
if "limit" in [
k for (k, _) in type_manager.get_parameters("query")
]:
@ -1279,10 +1346,18 @@ class RustCliGenerator(BaseGenerator):
additional_imports.add("crate::common::download_file")
if args.operation_type == "upload":
additional_imports.add("crate::common::build_upload_asyncread")
additional_imports.add(
"crate::common::build_upload_asyncread"
)
if (
(isinstance(root_type, StructResponse) and root_type.fields)
or (isinstance(root_type, TupleStruct) and root_type.tuple_fields)
(
isinstance(root_type, StructResponse)
and root_type.fields
)
or (
isinstance(root_type, TupleStruct)
and root_type.tuple_fields
)
or (isinstance(root_type, common_rust.Dictionary))
):
additional_imports.add("openstack_sdk::api::QueryAsync")
@ -1295,10 +1370,18 @@ class RustCliGenerator(BaseGenerator):
additional_imports.add("structable_derive::StructTable")
if resource_header_metadata:
additional_imports.add("crate::common::HashMapStringString")
additional_imports.add(
"crate::common::HashMapStringString"
)
additional_imports.add("std::collections::HashMap")
if (
len([x for x in resource_header_metadata.keys() if "*" in x])
len(
[
x
for x in resource_header_metadata.keys()
if "*" in x
]
)
> 0
):
additional_imports.add("regex::Regex")
@ -1338,60 +1421,62 @@ class RustCliGenerator(BaseGenerator):
command_description = operation_body.get(
"description", command_description
)
command_summary = operation_body.get("summary", command_summary)
command_summary = operation_body.get(
"summary", command_summary
)
if command_summary and microversion:
command_summary += f" (microversion = {microversion})"
if not command_description:
command_description = "Command without description in OpenAPI"
context = dict(
operation_id=operation_id,
operation_type=args.operation_type,
command_description=common_rust.sanitize_rust_docstrings(
command_description = (
"Command without description in OpenAPI"
)
context = {
"operation_id": operation_id,
"operation_type": args.operation_type,
"command_description": common_rust.sanitize_rust_docstrings(
command_description
),
command_summary=common_rust.sanitize_rust_docstrings(
"command_summary": common_rust.sanitize_rust_docstrings(
command_summary
),
type_manager=type_manager,
resource_name=resource_name,
response_type_manager=response_type_manager,
target_class_name="".join(
"type_manager": type_manager,
"resource_name": resource_name,
"response_type_manager": response_type_manager,
"target_class_name": "".join(
x.title() for x in target_class_name.split("_")
),
sdk_struct_name="Request",
sdk_service_name=common.get_rust_service_type_from_str(
"sdk_struct_name": "Request",
"sdk_service_name": common.get_rust_service_type_from_str(
args.service_type
),
service_type=args.service_type,
url=path[1:] if path.startswith("/") else path,
method=method,
resource_key=None,
resource_header_metadata=resource_header_metadata,
sdk_mod_path=sdk_mod_path,
cli_mod_path=cli_mod_path,
result_def=result_def,
"service_type": args.service_type,
"url": path[1:] if path.startswith("/") else path,
"method": method,
"resource_key": None,
"resource_header_metadata": resource_header_metadata,
"sdk_mod_path": sdk_mod_path,
"cli_mod_path": cli_mod_path,
"result_def": result_def,
# Last path param is required for the download operation
last_path_parameter=last_path_parameter,
body_types=body_types,
additional_imports=additional_imports,
find_present=args.find_implemented_by_sdk,
microversion=microversion,
result_is_list=result_is_list,
is_image_download=is_image_download,
is_json_patch=is_json_patch,
is_list_paginated=is_list_paginated,
)
"last_path_parameter": last_path_parameter,
"body_types": body_types,
"additional_imports": additional_imports,
"find_present": args.find_implemented_by_sdk,
"microversion": microversion,
"result_is_list": result_is_list,
"is_image_download": is_image_download,
"is_json_patch": is_json_patch,
"is_list_paginated": is_list_paginated,
}
if not args.cli_mod_path:
# mod_name = args.operation_name or args.operation_type.value
impl_path = Path(work_dir, "/".join(cli_mod_path), f"{mod_name}.rs")
impl_path = Path(
work_dir, "/".join(cli_mod_path), f"{mod_name}.rs"
)
self._render_command(
context,
"rust_cli/impl.rs.j2",
impl_path,
)
self._render_command(context, "rust_cli/impl.rs.j2", impl_path)
self._format_code(impl_path)
if args.cli_full_command and True: # args.tests:
@ -1403,7 +1488,9 @@ class RustCliGenerator(BaseGenerator):
)
cmd = args.cli_full_command
if microversion:
cmd = args.cli_full_command + microversion.replace(".", "")
cmd = args.cli_full_command + microversion.replace(
".", ""
)
test_context = {
"service_type": args.service_type,

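_simplify_oneof_combinations above collapses a handful of well-known oneOf shapes into a single CLI-friendly type: string plus number becomes NumString, string plus integer becomes IntString, and string plus boolean degrades to a plain String. A toy sketch of those rules with stand-in classes rather than the real type-model classes:

class String: ...
class Integer: ...
class Number: ...
class Boolean: ...
class IntString: ...
class NumString: ...


def simplify_oneof(kind_classes: list[type]) -> type | None:
    """Collapse well-known oneOf combinations into a single type."""
    kinds = set(kind_classes)
    if kinds == {String, Number}:
        return NumString  # oneOf [string, number] => NumString
    if kinds == {String, Integer}:
        return IntString  # oneOf [string, integer] => IntString
    if kinds == {String, Boolean}:
        return String  # oneOf [string, boolean] => String
    return None  # anything else is left to the generic handling


assert simplify_oneof([String, Integer]) is IntString
assert simplify_oneof([String, Boolean]) is String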

@ -24,18 +24,18 @@ from codegenerator.common import rust as common_rust
class String(common_rust.String):
lifetimes: set[str] = set(["'a"])
lifetimes: set[str] = {"'a"}
type_hint: str = "Cow<'a, str>"
@property
def imports(self) -> set[str]:
return set(["std::borrow::Cow"])
return {"std::borrow::Cow"}
class Enum(common_rust.Enum):
@property
def builder_macros(self):
macros: set[str] = set(["setter(into)"])
macros: set[str] = {"setter(into)"}
return macros
@property
@ -51,14 +51,18 @@ class Enum(common_rust.Enum):
return "#[derive(Debug, Deserialize, Clone, Serialize)]"
def get_sample(self):
(first_kind_name, first_kind_val) = list(sorted(self.kinds.items()))[0]
(first_kind_name, first_kind_val) = sorted(self.kinds.items())[0]
res = (
self.name
+ "::"
+ first_kind_name
+ "("
+ first_kind_val.data_type.get_sample()
+ (".into()" if isinstance(first_kind_val.data_type, String) else "")
+ (
".into()"
if isinstance(first_kind_val.data_type, String)
else ""
)
+ ")"
)
return res
@ -67,7 +71,7 @@ class Enum(common_rust.Enum):
class StructField(common_rust.StructField):
@property
def builder_macros(self):
macros: set[str] = set([])
macros: set[str] = set()
if not isinstance(self.data_type, BaseCompoundType):
macros.update(self.data_type.builder_macros)
elif not isinstance(self.data_type, common_rust.StringEnum):
@ -86,7 +90,7 @@ class StructField(common_rust.StructField):
@property
def serde_macros(self):
macros = set([])
macros = set()
if self.local_name != self.remote_name:
macros.add(f'rename="{self.remote_name}"')
if self.is_optional:
@ -145,7 +149,7 @@ class Struct(common_rust.Struct):
class BTreeMap(common_rust.Dictionary):
builder_macros: set[str] = set(["private"])
builder_macros: set[str] = {"private"}
requires_builder_private_setter: bool = True
@property
@ -154,13 +158,13 @@ class BTreeMap(common_rust.Dictionary):
@property
def imports(self):
imports = set(["std::collections::BTreeMap"])
imports = {"std::collections::BTreeMap"}
imports.update(self.value_type.imports)
return imports
@property
def lifetimes(self):
lt = set(["'a"])
lt = {"'a"}
if self.value_type.lifetimes:
lt.update(self.value_type.lifetimes)
return lt
@ -182,7 +186,9 @@ class BTreeMap(common_rust.Dictionary):
f".map(|(k, v)| (k, v.into_iter()))"
)
else:
type_hint = self.value_type.type_hint.replace("Cow<'a, str>", "String")
type_hint = self.value_type.type_hint.replace(
"Cow<'a, str>", "String"
)
return f"BTreeMap::<String, {type_hint}>::new().into_iter()"
def get_mandatory_init(self):
@ -190,7 +196,7 @@ class BTreeMap(common_rust.Dictionary):
class BTreeSet(common_rust.BTreeSet):
builder_macros: set[str] = set(["private"])
builder_macros: set[str] = {"private"}
requires_builder_private_setter: bool = True
@ -201,7 +207,7 @@ class CommaSeparatedList(common_rust.CommaSeparatedList):
@property
def imports(self):
imports: set[str] = set([])
imports: set[str] = set()
imports.add("crate::api::common::CommaSeparatedList")
imports.update(self.item_type.imports)
return imports
@ -241,7 +247,9 @@ class TypeManager(common_rust.TypeManager):
model.CommaSeparatedList: CommaSeparatedList,
}
request_parameter_class: Type[common_rust.RequestParameter] = RequestParameter
request_parameter_class: Type[common_rust.RequestParameter] = (
RequestParameter
)
def set_parameters(self, parameters: list[model.RequestParameter]) -> None:
"""Set OpenAPI operation parameters into typemanager for conversion"""
@ -285,10 +293,7 @@ class RustSdkGenerator(BaseGenerator):
return parser
def _render_command(
self,
context: dict,
impl_template: str,
impl_dest: Path,
self, context: dict, impl_template: str, impl_dest: Path
):
"""Render command code"""
self._render(impl_template, context, impl_dest.parent, impl_dest.name)
@ -308,7 +313,9 @@ class RustSdkGenerator(BaseGenerator):
openapi_spec = common.get_openapi_spec(args.openapi_yaml_spec)
if not operation_id:
operation_id = args.openapi_operation_id
(path, method, spec) = common.find_openapi_operation(openapi_spec, operation_id)
(path, method, spec) = common.find_openapi_operation(
openapi_spec, operation_id
)
if args.operation_type == "find":
yield self.generate_find_mod(
target_dir,
@ -334,12 +341,12 @@ class RustSdkGenerator(BaseGenerator):
type_manager: TypeManager | None = None
is_json_patch: bool = False
# Collect all operation parameters
for param in openapi_spec["paths"][path].get("parameters", []) + spec.get(
for param in openapi_spec["paths"][path].get(
"parameters", []
):
if (("{" + param["name"] + "}") in path and param["in"] == "path") or param[
"in"
] != "path":
) + spec.get("parameters", []):
if (
("{" + param["name"] + "}") in path and param["in"] == "path"
) or param["in"] != "path":
# Keep path params that actually appear in the URL, plus all non-path params
param_ = openapi_parser.parse_parameter(param)
if param_.name in [
@ -353,7 +360,9 @@ class RustSdkGenerator(BaseGenerator):
# Process body information
# List of operation variants (based on the body)
operation_variants = common.get_operation_variants(spec, args.operation_name)
operation_variants = common.get_operation_variants(
spec, args.operation_name
)
api_ver_matches: re.Match | None = None
path_elements = path.lstrip("/").split("/")
@ -366,7 +375,7 @@ class RustSdkGenerator(BaseGenerator):
ver_prefix = path_elements[0]
for operation_variant in operation_variants:
logging.debug("Processing variant %s" % operation_variant)
logging.debug(f"Processing variant {operation_variant}")
# TODO(gtema): if we are in MV variants filter out unsupported query
# parameters
# TODO(gtema): previously we were ensuring `router_id` path param
@ -449,13 +458,12 @@ class RustSdkGenerator(BaseGenerator):
if "application/json" in content:
response_spec = content["application/json"]
try:
(
_,
response_key,
) = common.find_resource_schema(
response_spec["schema"],
None,
res_name.lower(),
(_, response_key) = (
common.find_resource_schema(
response_spec["schema"],
None,
res_name.lower(),
)
)
except Exception:
# Most likely we have response which is oneOf.
@ -465,42 +473,35 @@ class RustSdkGenerator(BaseGenerator):
# response_def = (None,)
response_key = None
context = dict(
operation_id=operation_id,
operation_type=spec.get(
context = {
"operation_id": operation_id,
"operation_type": spec.get(
"x-openstack-operation-type", args.operation_type
),
command_description=common_rust.sanitize_rust_docstrings(
"command_description": common_rust.sanitize_rust_docstrings(
common.make_ascii_string(spec.get("description"))
),
class_name=class_name,
sdk_service_name=common.get_rust_service_type_from_str(
"class_name": class_name,
"sdk_service_name": common.get_rust_service_type_from_str(
args.service_type
),
url=path.lstrip("/").lstrip(ver_prefix).lstrip("/"),
method=method,
type_manager=type_manager,
response_key=response_key,
response_list_item_key=args.response_list_item_key,
mime_type=mime_type,
is_json_patch=is_json_patch,
api_ver=api_ver,
)
"url": path.lstrip("/").lstrip(ver_prefix).lstrip("/"),
"method": method,
"type_manager": type_manager,
"response_key": response_key,
"response_list_item_key": args.response_list_item_key,
"mime_type": mime_type,
"is_json_patch": is_json_patch,
"api_ver": api_ver,
}
work_dir = Path(target_dir, "rust", "openstack_sdk", "src")
impl_path = Path(
work_dir,
"api",
"/".join(mod_path),
f"{mod_name}.rs",
work_dir, "api", "/".join(mod_path), f"{mod_name}.rs"
)
# Generate methods for the GET resource command
self._render_command(
context,
"rust_sdk/impl.rs.j2",
impl_path,
)
self._render_command(context, "rust_sdk/impl.rs.j2", impl_path)
self._format_code(impl_path)
@ -512,26 +513,19 @@ class RustSdkGenerator(BaseGenerator):
"""Generate collection module (include individual modules)"""
work_dir = Path(target_dir, "rust", "openstack_sdk", "src")
impl_path = Path(
work_dir,
"api",
"/".join(mod_path[0:-1]),
f"{mod_path[-1]}.rs",
work_dir, "api", "/".join(mod_path[0:-1]), f"{mod_path[-1]}.rs"
)
context = dict(
mod_list=mod_list,
mod_path=mod_path,
url=url,
resource_name=resource_name,
service_name=service_name,
)
context = {
"mod_list": mod_list,
"mod_path": mod_path,
"url": url,
"resource_name": resource_name,
"service_name": service_name,
}
# Generate methods for the GET resource command
self._render_command(
context,
"rust_sdk/mod.rs.j2",
impl_path,
)
self._render_command(context, "rust_sdk/mod.rs.j2", impl_path)
self._format_code(impl_path)
@ -550,12 +544,7 @@ class RustSdkGenerator(BaseGenerator):
):
"""Generate `find` operation module"""
work_dir = Path(target_dir, "rust", "openstack_sdk", "src")
impl_path = Path(
work_dir,
"api",
"/".join(mod_path),
"find.rs",
)
impl_path = Path(work_dir, "api", "/".join(mod_path), "find.rs")
# Collect all operation parameters
openapi_parser = model.OpenAPISchemaParser()
path_resources = common.get_resource_names_from_url(path)
@ -563,9 +552,9 @@ class RustSdkGenerator(BaseGenerator):
operation_path_params: list[model.RequestParameter] = []
operation_query_params: list[model.RequestParameter] = []
for param in openapi_spec["paths"][path].get("parameters", []) + spec.get(
for param in openapi_spec["paths"][path].get(
"parameters", []
):
) + spec.get("parameters", []):
if ("{" + param["name"] + "}") in path and param["in"] == "path":
# Keep only path params that actually appear in the URL
param_ = openapi_parser.parse_parameter(param)
@ -583,24 +572,22 @@ class RustSdkGenerator(BaseGenerator):
type_manager = TypeManager()
type_manager.set_parameters(operation_path_params)
context = dict(
mod_path=mod_path,
resource_name=resource_name,
list_mod=list_mod,
name_filter_supported=name_filter_supported,
name_field=name_field,
type_manager=type_manager,
list_lifetime=(
"<'a>" if operation_query_params or operation_path_params else ""
context = {
"mod_path": mod_path,
"resource_name": resource_name,
"list_mod": list_mod,
"name_filter_supported": name_filter_supported,
"name_field": name_field,
"type_manager": type_manager,
"list_lifetime": (
"<'a>"
if operation_query_params or operation_path_params
else ""
),
)
}
# Generate methods for the GET resource command
self._render_command(
context,
"rust_sdk/find.rs.j2",
impl_path,
)
self._render_command(context, "rust_sdk/find.rs.j2", impl_path)
self._format_code(impl_path)

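Both the CLI and the SDK generators above collect operation parameters with the same filter: a parameter declared with in: path is kept only when its placeholder really occurs in the URL template, while query and other parameters pass through untouched. A sketch of that predicate on hypothetical parameter data:

path = "/v2.1/servers/{server_id}/action"
parameters = [
    {"name": "server_id", "in": "path"},
    {"name": "project_id", "in": "path"},  # no {project_id} in the URL -> dropped
    {"name": "limit", "in": "query"},
]

kept = [
    param
    for param in parameters
    if (("{" + param["name"] + "}") in path and param["in"] == "path")
    or param["in"] != "path"
]

print([param["name"] for param in kept])  # ['server_id', 'limit']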

@ -31,5 +31,7 @@ class TestGenerator(TestCase):
generator.generate(work_dir.name, Args(validate=True))
self.assertTrue(
Path(work_dir.name, "openapi_specs", "block-storage", "v3.yaml").exists()
Path(
work_dir.name, "openapi_specs", "block-storage", "v3.yaml"
).exists()
)


@ -31,5 +31,7 @@ class TestGenerator(TestCase):
generator.generate(work_dir.name, Args(validate=True))
self.assertTrue(
Path(work_dir.name, "openapi_specs", "identity", "v3.yaml").exists()
Path(
work_dir.name, "openapi_specs", "identity", "v3.yaml"
).exists()
)


@ -31,5 +31,7 @@ class TestGenerator(TestCase):
generator.generate(work_dir.name, Args(validate=True))
self.assertTrue(
Path(work_dir.name, "openapi_specs", "load-balancing", "v2.yaml").exists()
Path(
work_dir.name, "openapi_specs", "load-balancing", "v2.yaml"
).exists()
)


@ -81,7 +81,9 @@ class TestFindResponseSchema(TestCase):
},
"204": {
"content": {
"application/json": {"schema": {"oneOf": [foo_action, bar_action]}}
"application/json": {
"schema": {"oneOf": [foo_action, bar_action]}
}
}
},
}
@ -94,7 +96,7 @@ class TestFindResponseSchema(TestCase):
common.find_response_schema(responses, "foo", "bar-action"),
)
self.assertIsNone(
common.find_response_schema(responses, "foo", "baz-action"),
common.find_response_schema(responses, "foo", "baz-action")
)
self.assertEqual(
responses["200"]["content"]["application/json"]["schema"],
@ -104,11 +106,7 @@ class TestFindResponseSchema(TestCase):
def test_no_candidates_returns_root(self):
responses = {
"200": {
"content": {
"application/json": {
"schema": self.FOO["foo"],
}
}
"content": {"application/json": {"schema": self.FOO["foo"]}}
}
}
self.assertEqual(


@ -135,12 +135,7 @@ SAMPLE_SERVER_SCHEMA = {
},
"delete_on_termination": {
"type": ["boolean", "string"],
"enum": [
True,
"True",
False,
"False",
],
"enum": [True, "True", False, "False"],
},
"no_device": {},
"connection_info": {
@ -179,12 +174,7 @@ SAMPLE_SERVER_SCHEMA = {
},
"delete_on_termination": {
"type": ["boolean", "string"],
"enum": [
True,
"True",
False,
"False",
],
"enum": [True, "True", False, "False"],
},
"no_device": {},
"connection_info": {
@ -410,7 +400,9 @@ EXPECTED_TLA_DATA = model.Struct(
min_ver="2.94",
),
"os:scheduler_hints": model.StructField(
data_type=model.Reference(name="os:scheduler_hints", type=model.Struct),
data_type=model.Reference(
name="os:scheduler_hints", type=model.Struct
),
description="scheduler hints description",
min_ver="2.94",
),
@ -434,10 +426,7 @@ EXPECTED_DATA_TYPES = [
),
model.OneOfType(
reference=model.Reference(name="flavorRef", type=model.OneOfType),
kinds=[
model.ConstraintString(minLength=1),
model.ConstraintInteger(),
],
kinds=[model.ConstraintString(minLength=1), model.ConstraintInteger()],
min_ver="2.94",
),
model.Dictionary(
@ -456,28 +445,27 @@ EXPECTED_DATA_TYPES = [
),
model.OneOfType(
reference=model.Reference(name="port", type=model.OneOfType),
kinds=[
model.ConstraintString(format="uuid"),
model.PrimitiveNull(),
],
kinds=[model.ConstraintString(format="uuid"), model.PrimitiveNull()],
min_ver="2.94",
),
model.Struct(
reference=model.Reference(name="networks", type=model.Struct),
fields={
"fixed_ip": model.StructField(
data_type=model.Reference(name="fixed_ip", type=model.OneOfType),
data_type=model.Reference(
name="fixed_ip", type=model.OneOfType
)
),
"port": model.StructField(
data_type=model.Reference(name="port", type=model.OneOfType),
data_type=model.Reference(name="port", type=model.OneOfType)
),
"uuid": model.StructField(
data_type=model.ConstraintString(format="uuid"),
data_type=model.ConstraintString(format="uuid")
),
"tag": model.StructField(
data_type=model.ConstraintString(
minLength=1, maxLength=60, pattern="^[^,/]*$"
),
)
),
},
min_ver="2.94",
@ -519,43 +507,45 @@ EXPECTED_DATA_TYPES = [
# ],
# ),
model.Enum(
reference=model.Reference(name="delete_on_termination", type=model.Enum),
reference=model.Reference(
name="delete_on_termination", type=model.Enum
),
literals=[True, "True", False, "False"],
base_types=[model.ConstraintString, model.PrimitiveBoolean],
min_ver="2.94",
),
model.Struct(
reference=model.Reference(name="block_device_mapping", type=model.Struct),
reference=model.Reference(
name="block_device_mapping", type=model.Struct
),
fields={
"virtual_name": model.StructField(
data_type=model.ConstraintString(maxLength=255),
data_type=model.ConstraintString(maxLength=255)
),
"volume_id": model.StructField(
data_type=model.ConstraintString(format="uuid"),
data_type=model.ConstraintString(format="uuid")
),
"snapshot_id": model.StructField(
data_type=model.ConstraintString(format="uuid"),
data_type=model.ConstraintString(format="uuid")
),
"volume_size": model.StructField(
data_type=model.Reference(name="volume_size", type=model.OneOfType),
data_type=model.Reference(
name="volume_size", type=model.OneOfType
)
),
"device_name": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=255,
pattern="^[a-zA-Z0-9._-r/]*$",
),
minLength=1, maxLength=255, pattern="^[a-zA-Z0-9._-r/]*$"
)
),
"delete_on_termination": model.StructField(
data_type=model.Reference(
name="delete_on_termination", type=model.Enum
),
),
"no_device": model.StructField(
data_type=model.PrimitiveNull(),
)
),
"no_device": model.StructField(data_type=model.PrimitiveNull()),
"connection_info": model.StructField(
data_type=model.ConstraintString(maxLength=16777215),
data_type=model.ConstraintString(maxLength=16777215)
),
},
min_ver="2.94",
@@ -586,122 +576,110 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
model.Struct(
reference=model.Reference(name="block_device_mapping_v2", type=model.Struct),
reference=model.Reference(
name="block_device_mapping_v2", type=model.Struct
),
fields={
"virtual_name": model.StructField(
data_type=model.ConstraintString(maxLength=255),
data_type=model.ConstraintString(maxLength=255)
),
"volume_id": model.StructField(
data_type=model.ConstraintString(
format="uuid",
),
data_type=model.ConstraintString(format="uuid")
),
"snapshot_id": model.StructField(
data_type=model.ConstraintString(
format="uuid",
),
data_type=model.ConstraintString(format="uuid")
),
"volume_size": model.StructField(
data_type=model.Reference(name="volume_size", type=model.OneOfType),
data_type=model.Reference(
name="volume_size", type=model.OneOfType
)
),
"device_name": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=255,
pattern="^[a-zA-Z0-9._-r/]*$",
),
minLength=1, maxLength=255, pattern="^[a-zA-Z0-9._-r/]*$"
)
),
"delete_on_termination": model.StructField(
data_type=model.Reference(
name="delete_on_termination", type=model.Enum
),
),
"no_device": model.StructField(
data_type=model.PrimitiveNull(),
)
),
"no_device": model.StructField(data_type=model.PrimitiveNull()),
"connection_info": model.StructField(
data_type=model.ConstraintString(
maxLength=16777215,
),
data_type=model.ConstraintString(maxLength=16777215)
),
"source_type": model.StructField(
data_type=model.Reference(name="source_type", type=model.Enum)
),
"uuid": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=255,
pattern="^[a-zA-Z0-9._-]*$",
),
minLength=1, maxLength=255, pattern="^[a-zA-Z0-9._-]*$"
)
),
"image_id": model.StructField(
data_type=model.ConstraintString(
format="uuid",
),
data_type=model.ConstraintString(format="uuid")
),
"destination_type": model.StructField(
data_type=model.Reference(name="destination_type", type=model.Enum)
data_type=model.Reference(
name="destination_type", type=model.Enum
)
),
"guest_format": model.StructField(
data_type=model.ConstraintString(
maxLength=255,
),
data_type=model.ConstraintString(maxLength=255)
),
"device_type": model.StructField(
data_type=model.ConstraintString(
maxLength=255,
),
data_type=model.ConstraintString(maxLength=255)
),
"disk_bus": model.StructField(
data_type=model.ConstraintString(
maxLength=255,
),
data_type=model.ConstraintString(maxLength=255)
),
"boot_index": model.StructField(
data_type=model.Reference(name="boot_index", type=model.OneOfType),
data_type=model.Reference(
name="boot_index", type=model.OneOfType
)
),
"tag": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=60,
pattern="^[^,/]*$",
),
minLength=1, maxLength=60, pattern="^[^,/]*$"
)
),
"volume_type": model.StructField(
data_type=model.Reference(name="volume_type", type=model.OneOfType),
data_type=model.Reference(
name="volume_type", type=model.OneOfType
)
),
},
min_ver="2.94",
),
model.Array(
reference=model.Reference(name="block_device_mapping", type=model.Array),
item_type=model.Reference(name="block_device_mapping", type=model.Struct),
reference=model.Reference(
name="block_device_mapping", type=model.Array
),
item_type=model.Reference(
name="block_device_mapping", type=model.Struct
),
min_ver="2.94",
),
model.Array(
reference=model.Reference(name="block_device_mapping_v2", type=model.Array),
item_type=model.Reference(name="block_device_mapping_v2", type=model.Struct),
reference=model.Reference(
name="block_device_mapping_v2", type=model.Array
),
item_type=model.Reference(
name="block_device_mapping_v2", type=model.Struct
),
min_ver="2.94",
),
model.Enum(
reference=model.Reference(name="config_drive", type=model.Enum),
base_types=[
model.PrimitiveBoolean,
model.ConstraintString,
],
literals=set(["No", "no", False]),
base_types=[model.PrimitiveBoolean, model.ConstraintString],
literals={"No", "no", False},
min_ver="2.94",
),
model.OneOfType(
reference=model.Reference(name="min_count", type=model.OneOfType),
kinds=[
model.ConstraintInteger(
minimum=1,
),
model.ConstraintString(
minLength=1,
pattern="^[0-9]*$",
),
model.ConstraintInteger(minimum=1),
model.ConstraintString(minLength=1, pattern="^[0-9]*$"),
],
min_ver="2.94",
),
@@ -726,9 +704,7 @@ EXPECTED_DATA_TYPES = [
reference=model.Reference(name="description", type=model.OneOfType),
kinds=[
model.ConstraintString(
minLength=0,
maxLength=255,
pattern="regex_pattern",
minLength=0, maxLength=255, pattern="regex_pattern"
),
model.PrimitiveNull(),
],
@@ -742,7 +718,9 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
model.Array(
reference=model.Reference(name="trusted_image_certificates", type=model.Array),
reference=model.Reference(
name="trusted_image_certificates", type=model.Array
),
item_type=model.ConstraintString(format=None, minLength=1),
min_ver="2.94",
),
@@ -751,7 +729,9 @@ EXPECTED_DATA_TYPES = [
name="trusted_image_certificates", type=model.OneOfType
),
kinds=[
model.Reference(name="trusted_image_certificates", type=model.Array),
model.Reference(
name="trusted_image_certificates", type=model.Array
),
model.PrimitiveNull(),
],
min_ver="2.94",
@@ -769,11 +749,15 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
"imageRef": model.StructField(
data_type=model.Reference(name="imageRef", type=model.OneOfType),
data_type=model.Reference(
name="imageRef", type=model.OneOfType
),
min_ver="2.94",
),
"flavorRef": model.StructField(
data_type=model.Reference(name="flavorRef", type=model.OneOfType),
data_type=model.Reference(
name="flavorRef", type=model.OneOfType
),
is_required=True,
min_ver="2.94",
),
@@ -781,18 +765,24 @@ EXPECTED_DATA_TYPES = [
data_type=model.ConstraintString(format=None), min_ver="2.94"
),
"metadata": model.StructField(
data_type=model.Reference(name="metadata", type=model.Dictionary),
data_type=model.Reference(
name="metadata", type=model.Dictionary
),
description="metadata description",
min_ver="2.94",
),
"networks": model.StructField(
data_type=model.Reference(name="networks", type=model.OneOfType),
data_type=model.Reference(
name="networks", type=model.OneOfType
),
description="Networks description",
is_required=True,
min_ver="2.94",
),
"OS-DCF:diskConfig": model.StructField(
data_type=model.Reference(name="OS-DCF:diskConfig", type=model.Enum),
data_type=model.Reference(
name="OS-DCF:diskConfig", type=model.Enum
),
description="DiskConfig description",
min_ver="2.94",
),
@@ -822,28 +812,35 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
"config_drive": model.StructField(
data_type=model.Reference(name="config_drive", type=model.Enum),
data_type=model.Reference(
name="config_drive", type=model.Enum
),
min_ver="2.94",
),
"min_count": model.StructField(
data_type=model.Reference(name="min_count", type=model.OneOfType),
data_type=model.Reference(
name="min_count", type=model.OneOfType
),
min_ver="2.94",
),
"security_groups": model.StructField(
data_type=model.Reference(name="security_groups", type=model.Array),
data_type=model.Reference(
name="security_groups", type=model.Array
),
description="SG descr",
min_ver="2.94",
),
"user_data": model.StructField(
data_type=model.ConstraintString(
format="base64",
maxLength=65535,
format="base64", maxLength=65535
),
description="user data",
min_ver="2.94",
),
"description": model.StructField(
data_type=model.Reference(name="description", type=model.OneOfType),
data_type=model.Reference(
name="description", type=model.OneOfType
),
min_ver="2.94",
),
"tags": model.StructField(
@@ -858,25 +855,19 @@ EXPECTED_DATA_TYPES = [
),
"host": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=255,
pattern="^[a-zA-Z0-9-._]*$",
minLength=1, maxLength=255, pattern="^[a-zA-Z0-9-._]*$"
),
min_ver="2.94",
),
"hypervisor_hostname": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=255,
pattern="^[a-zA-Z0-9-._]*$",
minLength=1, maxLength=255, pattern="^[a-zA-Z0-9-._]*$"
),
min_ver="2.94",
),
"hostname": model.StructField(
data_type=model.ConstraintString(
minLength=1,
maxLength=255,
pattern="^[a-zA-Z0-9-._]*$",
minLength=1, maxLength=255, pattern="^[a-zA-Z0-9-._]*$"
),
min_ver="2.94",
),
@@ -936,7 +927,9 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
model.OneOfType(
reference=model.Reference(name="build_near_host_ip", type=model.OneOfType),
reference=model.Reference(
name="build_near_host_ip", type=model.OneOfType
),
kinds=[
model.ConstraintString(format="ipv4"),
model.ConstraintString(format="ipv6"),
@@ -944,18 +937,24 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
model.Struct(
reference=model.Reference(name="os:scheduler_hints", type=model.Struct),
reference=model.Reference(
name="os:scheduler_hints", type=model.Struct
),
description="scheduler hints description",
fields={
"group": model.StructField(
data_type=model.ConstraintString(format="uuid"), min_ver="2.94"
),
"different_host": model.StructField(
data_type=model.Reference(name="different_host", type=model.OneOfType),
data_type=model.Reference(
name="different_host", type=model.OneOfType
),
min_ver="2.94",
),
"same_host": model.StructField(
data_type=model.Reference(name="same_host", type=model.OneOfType),
data_type=model.Reference(
name="same_host", type=model.OneOfType
),
description="A list of server UUIDs or a server UUID.",
min_ver="2.94",
),
@@ -970,7 +969,9 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
"different_cell": model.StructField(
data_type=model.Reference(name="different_cell", type=model.OneOfType),
data_type=model.Reference(
name="different_cell", type=model.OneOfType
),
min_ver="2.94",
),
"build_near_host_ip": model.StructField(
@@ -981,9 +982,7 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
"cidr": model.StructField(
data_type=model.ConstraintString(
pattern="^/[0-9a-f.:]+$",
),
data_type=model.ConstraintString(pattern="^/[0-9a-f.:]+$"),
min_ver="2.94",
),
},
@@ -1031,30 +1030,26 @@ EXPECTED_DATA_TYPES = [
),
model.Enum(
reference=model.Reference(name="source_type", type=model.Enum),
literals=set(["volume", "image", "snapshot", "blank"]),
base_types=[
model.ConstraintString,
],
literals={"volume", "image", "snapshot", "blank"},
base_types=[model.ConstraintString],
min_ver="2.94",
),
model.Enum(
reference=model.Reference(name="destination_type", type=model.Enum),
literals=set(["volume", "local"]),
base_types=[
model.ConstraintString,
],
literals={"volume", "local"},
base_types=[model.ConstraintString],
min_ver="2.94",
),
model.Enum(
reference=model.Reference(name="OS-DCF:diskConfig", type=model.Enum),
literals=set(["AUTO", "MANUAL"]),
base_types=[
model.ConstraintString,
],
literals={"AUTO", "MANUAL"},
base_types=[model.ConstraintString],
min_ver="2.94",
),
model.OneOfType(
reference=model.Reference(name="build_near_host_ip", type=model.OneOfType),
reference=model.Reference(
name="build_near_host_ip", type=model.OneOfType
),
kinds=[
model.ConstraintString(format="ipv4"),
model.ConstraintString(format="ipv6"),
@@ -1062,18 +1057,24 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
model.Struct(
reference=model.Reference(name="OS-SCH-HNT:scheduler_hints", type=model.Struct),
reference=model.Reference(
name="OS-SCH-HNT:scheduler_hints", type=model.Struct
),
fields={
"group": model.StructField(
data_type=model.ConstraintString(format="uuid"), min_ver="2.94"
),
"different_host": model.StructField(
data_type=model.Reference(name="different_host", type=model.OneOfType),
data_type=model.Reference(
name="different_host", type=model.OneOfType
),
description="A list of server UUIDs or a server UUID.\nSchedule the server on a different host from a set of servers.\nIt is available when `DifferentHostFilter` is available on cloud side.",
min_ver="2.94",
),
"same_host": model.StructField(
data_type=model.Reference(name="same_host", type=model.OneOfType),
data_type=model.Reference(
name="same_host", type=model.OneOfType
),
min_ver="2.94",
),
"query": model.StructField(
@@ -1087,7 +1088,9 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
"different_cell": model.StructField(
data_type=model.Reference(name="different_cell", type=model.OneOfType),
data_type=model.Reference(
name="different_cell", type=model.OneOfType
),
min_ver="2.94",
),
"build_near_host_ip": model.StructField(
@@ -1097,9 +1100,7 @@ EXPECTED_DATA_TYPES = [
min_ver="2.94",
),
"cidr": model.StructField(
data_type=model.ConstraintString(
pattern="^/[0-9a-f.:]+$",
),
data_type=model.ConstraintString(pattern="^/[0-9a-f.:]+$"),
min_ver="2.94",
),
},
@@ -1136,10 +1137,7 @@ class TestModel(TestCase):
schema = {
"in": "query",
"name": "tags",
"schema": {
"type": "string",
"format": "regex",
},
"schema": {"type": "string", "format": "regex"},
"x-openstack": {"min-ver": "2.26"},
}
parser = model.OpenAPISchemaParser()

View File

@@ -27,7 +27,11 @@ class TestParserObject(TestCase):
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Struct(
fields={"foo": model.StructField(data_type=model.ConstraintString())}
fields={
"foo": model.StructField(
data_type=model.ConstraintString()
)
}
),
res,
)
@@ -36,15 +40,17 @@ class TestParserObject(TestCase):
def test_parse_props_additional_props_forbidden(self):
schema = {
"type": "object",
"properties": {
"foo": {"type": "string"},
},
"properties": {"foo": {"type": "string"}},
"additionalProperties": False,
}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Struct(
fields={"foo": model.StructField(data_type=model.ConstraintString())}
fields={
"foo": model.StructField(
data_type=model.ConstraintString()
)
}
),
res,
)
@@ -53,15 +59,17 @@ class TestParserObject(TestCase):
def test_parse_props_additional_props_allowed(self):
schema = {
"type": "object",
"properties": {
"foo": {"type": "string"},
},
"properties": {"foo": {"type": "string"}},
"additionalProperties": True,
}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Struct(
fields={"foo": model.StructField(data_type=model.ConstraintString())},
fields={
"foo": model.StructField(
data_type=model.ConstraintString()
)
},
additional_fields=model.PrimitiveAny(),
),
res,
@@ -71,15 +79,17 @@ class TestParserObject(TestCase):
def test_parse_props_additional_props_type(self):
schema = {
"type": "object",
"properties": {
"foo": {"type": "string"},
},
"properties": {"foo": {"type": "string"}},
"additionalProperties": {"type": "string"},
}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Struct(
fields={"foo": model.StructField(data_type=model.ConstraintString())},
fields={
"foo": model.StructField(
data_type=model.ConstraintString()
)
},
additional_fields=model.ConstraintString(),
),
res,
@@ -87,14 +97,10 @@ class TestParserObject(TestCase):
self.assertEqual(1, len(all))
def test_parse_only_additional_props(self):
schema = {
"type": "object",
"additionalProperties": {"type": "string"},
}
schema = {"type": "object", "additionalProperties": {"type": "string"}}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Dictionary(value_type=model.ConstraintString()),
res,
model.Dictionary(value_type=model.ConstraintString()), res
)
self.assertEqual(1, len(all))
@@ -184,18 +190,18 @@ class TestParserObject(TestCase):
def test_parse_pattern_props(self):
schema = {
"type": "object",
"properties": {
"foo": {"type": "string"},
},
"properties": {"foo": {"type": "string"}},
"patternProperties": {"^A": {"type": "string"}},
}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Struct(
fields={"foo": model.StructField(data_type=model.ConstraintString())},
pattern_properties={
"^A": model.ConstraintString(),
fields={
"foo": model.StructField(
data_type=model.ConstraintString()
)
},
pattern_properties={"^A": model.ConstraintString()},
),
res,
)
@@ -208,19 +214,15 @@ class TestParserObject(TestCase):
}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Dictionary(value_type=model.ConstraintString()),
res,
model.Dictionary(value_type=model.ConstraintString()), res
)
self.assertEqual(1, len(all))
def test_parse_empty(self):
schema = {
"type": "object",
}
schema = {"type": "object"}
(res, all) = self.parser.parse(schema)
self.assertEqual(
model.Dictionary(value_type=model.PrimitiveAny()),
res,
model.Dictionary(value_type=model.PrimitiveAny()), res
)
self.assertEqual(1, len(all))
@@ -228,16 +230,8 @@ class TestParserObject(TestCase):
schema = {
"type": "object",
"oneOf": [
{
"properties": {
"foo": {"type": "string"},
}
},
{
"properties": {
"bar": {"type": "string"},
}
},
{"properties": {"foo": {"type": "string"}}},
{"properties": {"bar": {"type": "string"}}},
],
}
(res, all) = self.parser.parse(schema)
@@ -246,13 +240,17 @@ class TestParserObject(TestCase):
kinds=[
model.Struct(
fields={
"foo": model.StructField(data_type=model.ConstraintString())
},
"foo": model.StructField(
data_type=model.ConstraintString()
)
}
),
model.Struct(
fields={
"bar": model.StructField(data_type=model.ConstraintString())
},
"bar": model.StructField(
data_type=model.ConstraintString()
)
}
),
]
),
@@ -265,11 +263,7 @@ class TestParserObject(TestCase):
schema = {
"type": "object",
"allOf": [
{
"properties": {
"foo": {"type": "string"},
}
},
{"properties": {"foo": {"type": "string"}}},
{
"properties": {
"foo": {"type": "string"},
@@ -286,8 +280,10 @@ class TestParserObject(TestCase):
"foo": model.StructField(
data_type=model.ConstraintString(), is_required=True
),
"bar": model.StructField(data_type=model.ConstraintString()),
},
"bar": model.StructField(
data_type=model.ConstraintString()
),
}
),
res,
)

View File

@@ -51,7 +51,7 @@ class TestRustSdkModel(TestCase):
),
"g": model.StructField(
data_type=model.Dictionary(
value_type=model.PrimitiveString(),
value_type=model.PrimitiveString()
)
),
},
@@ -120,7 +120,7 @@ class TestRustSdkModel(TestCase):
self.assertEqual(
field.type_hint, "Option<BTreeMap<Cow<'a, str>, Cow<'a, str>>>"
)
self.assertEqual(set(["'a"]), mod.lifetimes)
self.assertEqual({"'a"}, mod.lifetimes)
def test_get_submodels(self):
logging.basicConfig(level=logging.DEBUG)
@@ -128,15 +128,13 @@ class TestRustSdkModel(TestCase):
type_manager.set_models(test_model.EXPECTED_DATA_TYPES)
# res = type_manager.get_subtypes()
self.assertEqual(
set(
[
"std::collections::BTreeMap",
"std::borrow::Cow",
"serde::Deserialize",
"serde::Serialize",
"serde_json::Value",
]
),
{
"std::collections::BTreeMap",
"std::borrow::Cow",
"serde::Deserialize",
"serde::Serialize",
"serde_json::Value",
},
type_manager.get_imports(),
)

View File

@@ -21,10 +21,7 @@ sys.path.insert(0, os.path.abspath("."))
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
"sphinx.ext.autodoc",
"openstackdocstheme",
]
extensions = ["sphinx.ext.autodoc", "openstackdocstheme"]
# openstackdocstheme options
openstackdocs_repo_name = "openstack/codegenerator"
@@ -86,7 +83,7 @@ latex_documents = [
"OpenStackCodegenerator Documentation",
"OpenStack Foundation",
"manual",
),
)
]
# Allow deeper levels of nesting for \begin...\end stanzas

View File

@@ -1,4 +1,3 @@
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
@@ -45,11 +44,7 @@ from sphinx.util import logging
linklogger = logging.getLogger("sphinx.ext.extlinks")
linklogger.setLevel(40) # Ignore messages less severe than ERROR
extensions = [
"openstackdocstheme",
"reno.sphinxext",
"sphinx.ext.extlinks",
]
extensions = ["openstackdocstheme", "reno.sphinxext", "sphinx.ext.extlinks"]
# openstackdocstheme options
openstackdocs_repo_name = "openstack/codegenerator"
@@ -132,9 +127,7 @@ html_theme = "openstackdocs"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
"display_toc": False,
}
html_theme_options = {"display_toc": False}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []