Switch to ruff
Change-Id: I849dd7c794d4c4a796e1a91c73dcf18a33d03d4d
parent f94c527051
commit 89c07638cf
@@ -21,12 +21,18 @@ repos:
rev: v1.1.1
hooks:
- id: doc8
- repo: https://github.com/psf/black
rev: 24.2.0
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.5.5
hooks:
- id: black
args: ['-l', '79']
exclude: '^codegenerator/templates/.*$'
- id: ruff
args: ['--fix', '--unsafe-fixes']
- id: ruff-format
# - repo: https://github.com/psf/black
# rev: 24.2.0
# hooks:
# - id: black
# args: ['-l', '79']
# exclude: '^codegenerator/templates/.*$'
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.4.1
hooks:

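The hunk above replaces the black hook with ruff (keeping a commented-out copy of the old black entry). Reassembled for readability, the new pre-commit entries read roughly as follows; the values are taken verbatim from the hunk, while the YAML indentation is an assumption, since the extracted diff above lost it:

  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.5.5
    hooks:
      - id: ruff
        args: ['--fix', '--unsafe-fixes']
      - id: ruff-format

With black's '-l 79' argument gone, ruff format applies its default 88-column line length (assuming no override in the project's ruff configuration), which is why the remaining hunks mostly collapse previously wrapped expressions back onto single lines.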
@@ -39,9 +39,7 @@ class ResourceProcessor:
def __init__(self, mod_name, class_name):
self.mod_name = mod_name
self.class_name = class_name
self.class_plural_name = (
class_name + "s" if class_name[:-1] != "y" else "ies"
)
self.class_plural_name = class_name + "s" if class_name[:-1] != "y" else "ies"

spec = importlib.util.find_spec(self.mod_name)
if not spec:
@@ -123,9 +121,7 @@ class Generator:
def get_openapi_spec(self, path: Path):
logging.debug("Fetch %s", path)
if path.as_posix() not in self.schemas:
self.schemas[path.as_posix()] = common.get_openapi_spec(
path.as_posix()
)
self.schemas[path.as_posix()] = common.get_openapi_spec(path.as_posix())
return self.schemas[path.as_posix()]

def load_metadata(self, path: Path):
@@ -162,9 +158,7 @@ def main():
],
help="Target for which to generate code",
)
parser.add_argument(
"--work-dir", help="Working directory for the generated code"
)
parser.add_argument("--work-dir", help="Working directory for the generated code")
parser.add_argument(
"--alternative-module-path",
help=("Optional new module path"),
@@ -252,14 +246,11 @@ def main():
openapi_spec = generator.get_openapi_spec(
Path(
# metadata_path.parent,
op_data.spec_file
or res_data.spec_file,
op_data.spec_file or res_data.spec_file,
).resolve()
)

for mod_path, mod_name, path in generators[
args.target
].generate(
for mod_path, mod_name, path in generators[args.target].generate(
res,
args.work_dir,
openapi_spec=openapi_spec,
@@ -290,16 +281,13 @@ def main():
resource_results: dict[str, dict] = dict()
for mod_path, mod_name, path in res_mods:
mn = "/".join(mod_path)
x = resource_results.setdefault(
mn, {"path": path, "mods": set()}
)
x = resource_results.setdefault(mn, {"path": path, "mods": set()})
x["mods"].add(mod_name)
changed = True
while changed:
changed = False
for mod_path in [
mod_path_str.split("/")
for mod_path_str in resource_results.keys()
mod_path_str.split("/") for mod_path_str in resource_results.keys()
]:
if len(mod_path) < 3:
continue

@@ -35,9 +35,7 @@ FQAN_ALIAS_MAP = {
}


def _deep_merge(
dict1: dict[Any, Any], dict2: dict[Any, Any]
) -> dict[Any, Any]:
def _deep_merge(dict1: dict[Any, Any], dict2: dict[Any, Any]) -> dict[Any, Any]:
result = dict1.copy()
for key, value in dict2.items():
if key in result:
@@ -176,11 +174,7 @@ def find_resource_schema(
raise RuntimeError("No type in %s" % schema)
schema_type = schema["type"]
if schema_type == "array":
if (
parent
and resource_name
and parent == get_plural_form(resource_name)
):
if parent and resource_name and parent == get_plural_form(resource_name):
items = schema["items"]
if (
items.get("type") == "object"
@@ -191,9 +185,7 @@ def find_resource_schema(
return (items["properties"][resource_name], parent)
else:
return (items, parent)
elif (
not parent and schema.get("items", {}).get("type") == "object"
):
elif not parent and schema.get("items", {}).get("type") == "object":
# Array on the top level. Most likely we are searching for items
# directly
return (schema["items"], None)
@@ -236,9 +228,7 @@ def find_resource_schema(
else:
return (schema, None)
except Exception as ex:
logging.exception(
f"Caught exception {ex} during processing of {schema}"
)
logging.exception(f"Caught exception {ex} during processing of {schema}")
raise
return (None, None)

@@ -285,9 +275,7 @@ def find_response_schema(
for candidate in oneof:
if (
action_name
and candidate.get("x-openstack", {}).get(
"action-name"
)
and candidate.get("x-openstack", {}).get("action-name")
== action_name
):
if response_key in candidate.get("properties", {}):
@@ -375,24 +363,17 @@ def get_operation_variants(spec: dict, operation_name: str):
variant_spec = variant.get("x-openstack", {})
if variant_spec.get("action-name") == operation_name:
discriminator = variant_spec.get("discriminator")
if (
"oneOf" in variant
and discriminator == "microversion"
):
if "oneOf" in variant and discriminator == "microversion":
logging.debug(
"Microversion discriminator for action bodies"
)
for subvariant in variant["oneOf"]:
subvariant_spec = subvariant.get(
"x-openstack", {}
)
subvariant_spec = subvariant.get("x-openstack", {})
operation_variants.append(
{
"body": subvariant,
"mode": "action",
"min-ver": subvariant_spec.get(
"min-ver"
),
"min-ver": subvariant_spec.get("min-ver"),
"mime_type": mime_type,
}
)

@ -253,9 +253,7 @@ class Struct(BaseCompoundType):
|
||||
|
||||
@property
|
||||
def type_hint(self):
|
||||
return self.name + (
|
||||
f"<{', '.join(self.lifetimes)}>" if self.lifetimes else ""
|
||||
)
|
||||
return self.name + (f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "")
|
||||
|
||||
@property
|
||||
def imports(self):
|
||||
@ -315,9 +313,7 @@ class Enum(BaseCompoundType):
|
||||
|
||||
@property
|
||||
def type_hint(self):
|
||||
return self.name + (
|
||||
f"<{', '.join(self.lifetimes)}>" if self.lifetimes else ""
|
||||
)
|
||||
return self.name + (f"<{', '.join(self.lifetimes)}>" if self.lifetimes else "")
|
||||
|
||||
@property
|
||||
def imports(self):
|
||||
@ -346,9 +342,7 @@ class StringEnum(BaseCompoundType):
|
||||
variants: dict[str, set[str]] = {}
|
||||
imports: set[str] = set(["serde::Deserialize", "serde::Serialize"])
|
||||
lifetimes: set[str] = set()
|
||||
derive_container_macros: str = (
|
||||
"#[derive(Debug, Deserialize, Clone, Serialize)]"
|
||||
)
|
||||
derive_container_macros: str = "#[derive(Debug, Deserialize, Clone, Serialize)]"
|
||||
builder_container_macros: str | None = None
|
||||
serde_container_macros: str | None = None # "#[serde(untagged)]"
|
||||
serde_macros: set[str] | None = None
|
||||
@ -490,9 +484,7 @@ class TypeManager:
|
||||
def get_local_attribute_name(self, name: str) -> str:
|
||||
"""Get localized attribute name"""
|
||||
name = name.replace(".", "_")
|
||||
attr_name = "_".join(
|
||||
x.lower() for x in re.split(common.SPLIT_NAME_RE, name)
|
||||
)
|
||||
attr_name = "_".join(x.lower() for x in re.split(common.SPLIT_NAME_RE, name))
|
||||
if attr_name in ["type", "self", "enum", "ref", "default"]:
|
||||
attr_name = f"_{attr_name}"
|
||||
return attr_name
|
||||
@ -510,8 +502,7 @@ class TypeManager:
|
||||
if not model_ref:
|
||||
return "Request"
|
||||
name = "".join(
|
||||
x.capitalize()
|
||||
for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
|
||||
x.capitalize() for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
|
||||
)
|
||||
return name
|
||||
|
||||
@ -527,9 +518,7 @@ class TypeManager:
|
||||
) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType:
|
||||
"""Get local destination type from the ModelType"""
|
||||
# logging.debug("Get RustSDK type for %s", type_model)
|
||||
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = (
|
||||
None
|
||||
)
|
||||
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = None
|
||||
model_ref: model.Reference | None = None
|
||||
if isinstance(type_model, model.Reference):
|
||||
model_ref = type_model
|
||||
@ -588,9 +577,7 @@ class TypeManager:
|
||||
val = "".join(
|
||||
[
|
||||
x.capitalize()
|
||||
for x in re.split(
|
||||
common.SPLIT_NAME_RE, lit
|
||||
)
|
||||
for x in re.split(common.SPLIT_NAME_RE, lit)
|
||||
]
|
||||
)
|
||||
if val and val[0].isdigit():
|
||||
@ -605,13 +592,9 @@ class TypeManager:
|
||||
variants=variants,
|
||||
)
|
||||
except Exception:
|
||||
logging.exception(
|
||||
"Error processing enum: %s", type_model
|
||||
)
|
||||
logging.exception("Error processing enum: %s", type_model)
|
||||
elif base_type is model.ConstraintInteger:
|
||||
typ = self.primitive_type_mapping[
|
||||
model.ConstraintInteger
|
||||
]()
|
||||
typ = self.primitive_type_mapping[model.ConstraintInteger]()
|
||||
elif base_type is model.ConstraintNumber:
|
||||
typ = self.primitive_type_mapping[model.ConstraintNumber]()
|
||||
elif base_type is model.PrimitiveBoolean:
|
||||
@ -690,9 +673,7 @@ class TypeManager:
|
||||
if item_type.__class__ == lt.item_type.__class__:
|
||||
result_data_type = self.data_type_mapping[model.Array](
|
||||
item_type=item_type,
|
||||
description=sanitize_rust_docstrings(
|
||||
type_model.description
|
||||
),
|
||||
description=sanitize_rust_docstrings(type_model.description),
|
||||
)
|
||||
# logging.debug("Replacing Typ + list[Typ] with list[Typ]")
|
||||
elif len(kinds) == 1:
|
||||
@ -721,9 +702,7 @@ class TypeManager:
|
||||
result_data_type.kinds[enum_kind.name] = enum_kind
|
||||
|
||||
if is_nullable:
|
||||
result_data_type = self.option_type_class(
|
||||
item_type=result_data_type
|
||||
)
|
||||
result_data_type = self.option_type_class(item_type=result_data_type)
|
||||
|
||||
return result_data_type
|
||||
|
||||
@ -782,28 +761,18 @@ class TypeManager:
|
||||
kinds.remove(typ)
|
||||
elif string_klass in kinds_classes and integer_klass in kinds_classes:
|
||||
int_klass = next(
|
||||
(
|
||||
x
|
||||
for x in type_model.kinds
|
||||
if isinstance(x, model.ConstraintInteger)
|
||||
)
|
||||
(x for x in type_model.kinds if isinstance(x, model.ConstraintInteger))
|
||||
)
|
||||
if (
|
||||
# XX_size or XX_count is clearly an integer
|
||||
(
|
||||
enum_name
|
||||
and (
|
||||
enum_name.endswith("size")
|
||||
or enum_name.endswith("count")
|
||||
)
|
||||
and (enum_name.endswith("size") or enum_name.endswith("count"))
|
||||
)
|
||||
# There is certain limit (min/max) - it can be only integer
|
||||
or (
|
||||
int_klass
|
||||
and (
|
||||
int_klass.minimum is not None
|
||||
or int_klass.maximum is not None
|
||||
)
|
||||
and (int_klass.minimum is not None or int_klass.maximum is not None)
|
||||
)
|
||||
):
|
||||
for typ in list(kinds):
|
||||
@ -863,8 +832,7 @@ class TypeManager:
|
||||
# Try adding parent_name as prefix
|
||||
new_name = (
|
||||
"".join(
|
||||
x.title()
|
||||
for x in model_.reference.parent.name.split("_")
|
||||
x.title() for x in model_.reference.parent.name.split("_")
|
||||
)
|
||||
+ name
|
||||
)
|
||||
@ -883,8 +851,7 @@ class TypeManager:
|
||||
# Try adding parent_name as prefix
|
||||
new_other_name = (
|
||||
"".join(
|
||||
x.title()
|
||||
for x in other_model.parent.name.split("_")
|
||||
x.title() for x in other_model.parent.name.split("_")
|
||||
)
|
||||
+ name
|
||||
)
|
||||
@ -895,9 +862,9 @@ class TypeManager:
|
||||
# with remote being oneOf with multiple structs)
|
||||
# Try to make a name consisting of props
|
||||
props = model_data_type.fields.keys()
|
||||
new_new_name = name + "".join(
|
||||
x.title() for x in props
|
||||
).replace("_", "")
|
||||
new_new_name = name + "".join(x.title() for x in props).replace(
|
||||
"_", ""
|
||||
)
|
||||
if new_new_name not in unique_models:
|
||||
for other_ref, other_model in self.refs.items():
|
||||
other_name = getattr(other_model, "name", None)
|
||||
@ -913,9 +880,7 @@ class TypeManager:
|
||||
x.title() for x in props
|
||||
).replace("_", "")
|
||||
other_model.name = new_other_name
|
||||
unique_models[new_other_name] = (
|
||||
model_.reference
|
||||
)
|
||||
unique_models[new_other_name] = model_.reference
|
||||
|
||||
model_data_type.name = new_new_name
|
||||
unique_models[new_new_name] = model_.reference
|
||||
@ -924,9 +889,7 @@ class TypeManager:
|
||||
"Model name %s is already present" % new_new_name
|
||||
)
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Model name %s is already present" % new_name
|
||||
)
|
||||
raise RuntimeError("Model name %s is already present" % new_name)
|
||||
elif (
|
||||
name
|
||||
and name in unique_models
|
||||
@ -945,17 +908,9 @@ class TypeManager:
|
||||
def get_subtypes(self):
|
||||
"""Get all subtypes excluding TLA"""
|
||||
for k, v in self.refs.items():
|
||||
if (
|
||||
k
|
||||
and isinstance(v, (Enum, Struct, StringEnum))
|
||||
and k.name != "Body"
|
||||
):
|
||||
if k and isinstance(v, (Enum, Struct, StringEnum)) and k.name != "Body":
|
||||
yield v
|
||||
elif (
|
||||
k
|
||||
and k.name != "Body"
|
||||
and isinstance(v, self.option_type_class)
|
||||
):
|
||||
elif k and k.name != "Body" and isinstance(v, self.option_type_class):
|
||||
if isinstance(v.item_type, Enum):
|
||||
yield v.item_type
|
||||
|
||||
@ -967,10 +922,7 @@ class TypeManager:
|
||||
# There might be tuple Struct (with
|
||||
# fields as list)
|
||||
field_names = list(v.fields.keys())
|
||||
if (
|
||||
len(field_names) == 1
|
||||
and v.fields[field_names[0]].is_optional
|
||||
):
|
||||
if len(field_names) == 1 and v.fields[field_names[0]].is_optional:
|
||||
# A body with only field can not normally be optional
|
||||
logging.warning(
|
||||
"Request body with single root field cannot be optional"
|
||||
@ -1058,9 +1010,7 @@ class TypeManager:
|
||||
if ref == type_model.reference:
|
||||
sub_ref: model.Reference | None = None
|
||||
if ref.type == model.Struct:
|
||||
logging.debug(
|
||||
"Element is a struct. Purging also field types"
|
||||
)
|
||||
logging.debug("Element is a struct. Purging also field types")
|
||||
# For struct type we cascadely discard all field types as
|
||||
# well
|
||||
for v in type_model.fields.values():
|
||||
@ -1072,9 +1022,7 @@ class TypeManager:
|
||||
logging.debug(f"Need to purge also {sub_ref}")
|
||||
self.discard_model(sub_ref)
|
||||
elif ref.type == model.OneOfType:
|
||||
logging.debug(
|
||||
"Element is a OneOf. Purging also kinds types"
|
||||
)
|
||||
logging.debug("Element is a OneOf. Purging also kinds types")
|
||||
for v in type_model.kinds:
|
||||
if isinstance(v, model.Reference):
|
||||
sub_ref = v
|
||||
@ -1090,9 +1038,7 @@ class TypeManager:
|
||||
if isinstance(type_model.item_type, model.Reference):
|
||||
sub_ref = type_model.item_type
|
||||
else:
|
||||
sub_ref = getattr(
|
||||
type_model.item_type, "reference", None
|
||||
)
|
||||
sub_ref = getattr(type_model.item_type, "reference", None)
|
||||
if sub_ref:
|
||||
logging.debug(f"Need to purge also {sub_ref}")
|
||||
self.discard_model(sub_ref)
|
||||
|
@ -35,9 +35,7 @@ class TypeSchema(BaseModel):
|
||||
ref: Optional[str] = Field(alias="$ref", default=None)
|
||||
oneOf: Optional[List[Any]] = Field(default=None)
|
||||
anyOf: Optional[List[Any]] = Field(default=None)
|
||||
openstack: Optional[Dict[str, Any]] = Field(
|
||||
alias="x-openstack", default=None
|
||||
)
|
||||
openstack: Optional[Dict[str, Any]] = Field(alias="x-openstack", default=None)
|
||||
required: Optional[List[str]] = None
|
||||
pattern: Optional[str] = None
|
||||
maxLength: Optional[int] = None
|
||||
@ -115,9 +113,7 @@ class HeaderSchema(BaseModel):
|
||||
model_config = ConfigDict(extra="allow", populate_by_name=True)
|
||||
|
||||
description: Optional[str] = None
|
||||
openstack: Optional[Dict[str, Any]] = Field(
|
||||
alias="x-openstack", default=None
|
||||
)
|
||||
openstack: Optional[Dict[str, Any]] = Field(alias="x-openstack", default=None)
|
||||
schema: Optional[TypeSchema] = Field(default=None)
|
||||
|
||||
|
||||
|
@ -29,9 +29,7 @@ class JsonSchemaGenerator(BaseGenerator):
|
||||
properties = {}
|
||||
for k, v in res.attrs.items():
|
||||
field = v["attr"]
|
||||
properties[field.name] = TypeSchema.from_sdk_field(
|
||||
field
|
||||
).model_dump(
|
||||
properties[field.name] = TypeSchema.from_sdk_field(field).model_dump(
|
||||
exclude_none=True, exclude_defaults=True, by_alias=True
|
||||
)
|
||||
if "docs" in v:
|
||||
@ -56,9 +54,7 @@ class JsonSchemaGenerator(BaseGenerator):
|
||||
"properties": properties,
|
||||
}
|
||||
}
|
||||
schema = TypeSchema(
|
||||
type="object", properties=properties, description=""
|
||||
)
|
||||
schema = TypeSchema(type="object", properties=properties, description="")
|
||||
# if res.resource_class._store_unknown_attrs_as_properties:
|
||||
# schema_attrs["additionalProperties"] = True
|
||||
# schema_attrs["properties"] = properties
|
||||
|
@ -160,9 +160,8 @@ class MetadataGenerator(BaseGenerator):
|
||||
elif method == "put":
|
||||
operation_key = "update"
|
||||
elif method == "patch":
|
||||
if (
|
||||
"application/json"
|
||||
in operation.requestBody.get("content", {})
|
||||
if "application/json" in operation.requestBody.get(
|
||||
"content", {}
|
||||
):
|
||||
operation_key = "update"
|
||||
else:
|
||||
@ -171,10 +170,7 @@ class MetadataGenerator(BaseGenerator):
|
||||
operation_key = "create"
|
||||
elif method == "delete":
|
||||
operation_key = "delete"
|
||||
elif (
|
||||
path.endswith("/detail")
|
||||
and resource_name != "quota_set"
|
||||
):
|
||||
elif path.endswith("/detail") and resource_name != "quota_set":
|
||||
if method == "get":
|
||||
operation_key = "list_detailed"
|
||||
# elif path.endswith("/default"):
|
||||
@ -309,9 +305,8 @@ class MetadataGenerator(BaseGenerator):
|
||||
elif method == "head":
|
||||
operation_key = "check"
|
||||
elif method == "patch":
|
||||
if (
|
||||
"application/json"
|
||||
in operation.requestBody.get("content", {})
|
||||
if "application/json" in operation.requestBody.get(
|
||||
"content", {}
|
||||
):
|
||||
operation_key = "update"
|
||||
else:
|
||||
@ -340,9 +335,7 @@ class MetadataGenerator(BaseGenerator):
|
||||
elif method == "delete":
|
||||
operation_key = "delete"
|
||||
if not operation_key:
|
||||
logging.warn(
|
||||
f"Cannot identify op name for {path}:{method}"
|
||||
)
|
||||
logging.warn(f"Cannot identify op name for {path}:{method}")
|
||||
|
||||
# Next hacks
|
||||
if args.service_type == "identity" and resource_name in [
|
||||
@ -413,22 +406,16 @@ class MetadataGenerator(BaseGenerator):
|
||||
if operation_key in resource_model:
|
||||
raise RuntimeError("Operation name conflict")
|
||||
else:
|
||||
if (
|
||||
operation_key == "action"
|
||||
and args.service_type
|
||||
in [
|
||||
"compute",
|
||||
"block-storage",
|
||||
]
|
||||
):
|
||||
if operation_key == "action" and args.service_type in [
|
||||
"compute",
|
||||
"block-storage",
|
||||
]:
|
||||
# For action we actually have multiple independent operations
|
||||
try:
|
||||
body_schema = operation.requestBody["content"][
|
||||
"application/json"
|
||||
]["schema"]
|
||||
bodies = body_schema.get(
|
||||
"oneOf", [body_schema]
|
||||
)
|
||||
bodies = body_schema.get("oneOf", [body_schema])
|
||||
if len(bodies) > 1:
|
||||
discriminator = body_schema.get(
|
||||
"x-openstack", {}
|
||||
@ -439,23 +426,17 @@ class MetadataGenerator(BaseGenerator):
|
||||
% path
|
||||
)
|
||||
for body in bodies:
|
||||
action_name = body.get(
|
||||
"x-openstack", {}
|
||||
).get("action-name")
|
||||
action_name = body.get("x-openstack", {}).get(
|
||||
"action-name"
|
||||
)
|
||||
if not action_name:
|
||||
action_name = list(
|
||||
body["properties"].keys()
|
||||
)[0]
|
||||
action_name = list(body["properties"].keys())[0]
|
||||
# Hardcode fixes
|
||||
if (
|
||||
resource_name == "flavor"
|
||||
and action_name
|
||||
in [
|
||||
"update",
|
||||
"create",
|
||||
"delete",
|
||||
]
|
||||
):
|
||||
if resource_name == "flavor" and action_name in [
|
||||
"update",
|
||||
"create",
|
||||
"delete",
|
||||
]:
|
||||
# Flavor update/create/delete
|
||||
# operations are exposed ALSO as wsgi
|
||||
# actions. This is wrong and useless.
|
||||
@ -472,24 +453,16 @@ class MetadataGenerator(BaseGenerator):
|
||||
common.SPLIT_NAME_RE, action_name
|
||||
)
|
||||
).lower()
|
||||
rust_sdk_params = (
|
||||
get_rust_sdk_operation_args(
|
||||
"action",
|
||||
operation_name=action_name,
|
||||
module_name=get_module_name(
|
||||
action_name
|
||||
),
|
||||
)
|
||||
rust_sdk_params = get_rust_sdk_operation_args(
|
||||
"action",
|
||||
operation_name=action_name,
|
||||
module_name=get_module_name(action_name),
|
||||
)
|
||||
rust_cli_params = (
|
||||
get_rust_cli_operation_args(
|
||||
"action",
|
||||
operation_name=action_name,
|
||||
module_name=get_module_name(
|
||||
action_name
|
||||
),
|
||||
resource_name=resource_name,
|
||||
)
|
||||
rust_cli_params = get_rust_cli_operation_args(
|
||||
"action",
|
||||
operation_name=action_name,
|
||||
module_name=get_module_name(action_name),
|
||||
resource_name=resource_name,
|
||||
)
|
||||
|
||||
op_model = OperationModel(
|
||||
@ -498,12 +471,8 @@ class MetadataGenerator(BaseGenerator):
|
||||
)
|
||||
op_model.operation_type = "action"
|
||||
|
||||
op_model.targets["rust-sdk"] = (
|
||||
rust_sdk_params
|
||||
)
|
||||
op_model.targets["rust-cli"] = (
|
||||
rust_cli_params
|
||||
)
|
||||
op_model.targets["rust-sdk"] = rust_sdk_params
|
||||
op_model.targets["rust-cli"] = rust_cli_params
|
||||
|
||||
op_model = post_process_operation(
|
||||
args.service_type,
|
||||
@ -512,25 +481,17 @@ class MetadataGenerator(BaseGenerator):
|
||||
op_model,
|
||||
)
|
||||
|
||||
resource_model.operations[
|
||||
operation_name
|
||||
] = op_model
|
||||
resource_model.operations[operation_name] = op_model
|
||||
|
||||
except KeyError:
|
||||
raise RuntimeError(
|
||||
"Cannot get bodies for %s" % path
|
||||
)
|
||||
raise RuntimeError("Cannot get bodies for %s" % path)
|
||||
else:
|
||||
if not operation_key:
|
||||
raise NotImplementedError
|
||||
operation_type = get_operation_type_by_key(
|
||||
operation_key
|
||||
)
|
||||
operation_type = get_operation_type_by_key(operation_key)
|
||||
op_model.operation_type = operation_type
|
||||
# NOTE: sdk gets operation_key and not operation_type
|
||||
rust_sdk_params = get_rust_sdk_operation_args(
|
||||
operation_key
|
||||
)
|
||||
rust_sdk_params = get_rust_sdk_operation_args(operation_key)
|
||||
rust_cli_params = get_rust_cli_operation_args(
|
||||
operation_key, resource_name=resource_name
|
||||
)
|
||||
@ -562,9 +523,7 @@ class MetadataGenerator(BaseGenerator):
|
||||
list_op.targets.pop("rust-cli")
|
||||
|
||||
# Prepare `find` operation data
|
||||
if (list_op or list_detailed_op) and res_data.operations.get(
|
||||
"show"
|
||||
):
|
||||
if (list_op or list_detailed_op) and res_data.operations.get("show"):
|
||||
show_op = res_data.operations["show"]
|
||||
|
||||
(path, _, spec) = common.find_openapi_operation(
|
||||
@ -623,8 +582,7 @@ class MetadataGenerator(BaseGenerator):
|
||||
name_field = fqan.split(".")[-1]
|
||||
name_filter_supported: bool = False
|
||||
if name_field in [
|
||||
x.get("name")
|
||||
for x in list(list_spec.get("parameters", []))
|
||||
x.get("name") for x in list(list_spec.get("parameters", []))
|
||||
]:
|
||||
name_filter_supported = True
|
||||
|
||||
@ -808,9 +766,7 @@ def post_process_operation(
|
||||
return operation
|
||||
|
||||
|
||||
def post_process_compute_operation(
|
||||
resource_name: str, operation_name: str, operation
|
||||
):
|
||||
def post_process_compute_operation(resource_name: str, operation_name: str, operation):
|
||||
if resource_name == "aggregate":
|
||||
if operation_name in ["set-metadata", "add-host", "remove-host"]:
|
||||
operation.targets["rust-sdk"].response_key = "aggregate"
|
||||
@ -826,9 +782,7 @@ def post_process_compute_operation(
|
||||
operation.targets["rust-cli"].sdk_mod_name = "list"
|
||||
operation.targets["rust-cli"].operation_name = "list"
|
||||
operation.targets["rust-sdk"].response_key = "availabilityZoneInfo"
|
||||
operation.targets["rust-cli"].cli_full_command = (
|
||||
"availability-zone list"
|
||||
)
|
||||
operation.targets["rust-cli"].cli_full_command = "availability-zone list"
|
||||
elif operation_name == "list_detailed":
|
||||
operation.operation_type = "list"
|
||||
operation.targets["rust-sdk"].operation_name = "list_detail"
|
||||
@ -838,9 +792,9 @@ def post_process_compute_operation(
|
||||
operation.targets["rust-cli"].operation_name = "list"
|
||||
operation.targets["rust-cli"].module_name = "list_detail"
|
||||
operation.targets["rust-cli"].sdk_mod_name = "list_detail"
|
||||
operation.targets["rust-cli"].cli_full_command = (
|
||||
"availability-zone list-detail"
|
||||
)
|
||||
operation.targets[
|
||||
"rust-cli"
|
||||
].cli_full_command = "availability-zone list-detail"
|
||||
|
||||
elif resource_name == "keypair":
|
||||
if operation_name == "list":
|
||||
@ -923,9 +877,7 @@ def post_process_compute_operation(
|
||||
return operation
|
||||
|
||||
|
||||
def post_process_identity_operation(
|
||||
resource_name: str, operation_name: str, operation
|
||||
):
|
||||
def post_process_identity_operation(resource_name: str, operation_name: str, operation):
|
||||
if resource_name == "role/imply":
|
||||
if operation_name == "list":
|
||||
operation.targets["rust-cli"].response_key = "role_inference"
|
||||
@ -982,9 +934,7 @@ def post_process_identity_operation(
|
||||
return operation
|
||||
|
||||
|
||||
def post_process_image_operation(
|
||||
resource_name: str, operation_name: str, operation
|
||||
):
|
||||
def post_process_image_operation(resource_name: str, operation_name: str, operation):
|
||||
if resource_name.startswith("schema"):
|
||||
# Image schemas are a JSON operation
|
||||
operation.targets["rust-cli"].operation_type = "json"
|
||||
@ -994,25 +944,16 @@ def post_process_image_operation(
|
||||
elif resource_name == "metadef/namespace" and operation_name != "list":
|
||||
operation.targets["rust-sdk"].response_key = "null"
|
||||
operation.targets["rust-cli"].response_key = "null"
|
||||
elif (
|
||||
resource_name == "metadef/namespace/property"
|
||||
and operation_name == "list"
|
||||
):
|
||||
elif resource_name == "metadef/namespace/property" and operation_name == "list":
|
||||
operation.targets["rust-cli"].operation_type = "list_from_struct"
|
||||
operation.targets["rust-cli"].response_key = "properties"
|
||||
operation.targets["rust-sdk"].response_key = "properties"
|
||||
elif resource_name == "metadef/namespace/resource_type":
|
||||
operation.targets["rust-cli"].response_key = (
|
||||
"resource_type_associations"
|
||||
)
|
||||
operation.targets["rust-sdk"].response_key = (
|
||||
"resource_type_associations"
|
||||
)
|
||||
operation.targets["rust-cli"].response_key = "resource_type_associations"
|
||||
operation.targets["rust-sdk"].response_key = "resource_type_associations"
|
||||
operation.targets["rust-cli"].cli_full_command = operation.targets[
|
||||
"rust-cli"
|
||||
].cli_full_command.replace(
|
||||
"resource-type", "resource-type-association"
|
||||
)
|
||||
].cli_full_command.replace("resource-type", "resource-type-association")
|
||||
elif resource_name == "image":
|
||||
if operation_name == "patch":
|
||||
operation.targets["rust-cli"].cli_full_command = operation.targets[
|
||||
@ -1069,9 +1010,7 @@ def post_process_block_storage_operation(
|
||||
if "update-snapshot-status" in operation_name:
|
||||
operation.targets["rust-cli"].cli_full_command = operation.targets[
|
||||
"rust-cli"
|
||||
].cli_full_command.replace(
|
||||
"update-snapshot-status", "update-status"
|
||||
)
|
||||
].cli_full_command.replace("update-snapshot-status", "update-status")
|
||||
|
||||
if resource_name in ["os_volume_transfer", "volume_transfer"]:
|
||||
if operation_name in ["list", "list_detailed"]:
|
||||
@ -1091,9 +1030,7 @@ def post_process_block_storage_operation(
|
||||
operation.targets["rust-cli"].sdk_mod_name = "list"
|
||||
operation.targets["rust-cli"].operation_name = "list"
|
||||
operation.targets["rust-sdk"].response_key = "availabilityZoneInfo"
|
||||
operation.targets["rust-cli"].cli_full_command = (
|
||||
"availability-zone list"
|
||||
)
|
||||
operation.targets["rust-cli"].cli_full_command = "availability-zone list"
|
||||
if resource_name == "qos_spec/association":
|
||||
operation.operation_type = "list"
|
||||
operation.targets["rust-sdk"].operation_name = "list"
|
||||
@ -1103,9 +1040,7 @@ def post_process_block_storage_operation(
|
||||
operation.targets["rust-cli"].sdk_mod_name = "list"
|
||||
operation.targets["rust-sdk"].response_key = "qos_associations"
|
||||
operation.targets["rust-cli"].response_key = "qos_associations"
|
||||
operation.targets["rust-cli"].cli_full_command = (
|
||||
"qos-spec association list"
|
||||
)
|
||||
operation.targets["rust-cli"].cli_full_command = "qos-spec association list"
|
||||
|
||||
if resource_name == "limit" and operation_name == "list":
|
||||
# Limits API return object and not a list
|
||||
@ -1119,9 +1054,7 @@ def post_process_block_storage_operation(
|
||||
return operation
|
||||
|
||||
|
||||
def post_process_network_operation(
|
||||
resource_name: str, operation_name: str, operation
|
||||
):
|
||||
def post_process_network_operation(resource_name: str, operation_name: str, operation):
|
||||
if resource_name.startswith("floatingip"):
|
||||
operation.targets["rust-cli"].cli_full_command = operation.targets[
|
||||
"rust-cli"
|
||||
|
@ -185,9 +185,7 @@ class JsonSchemaParser:
|
||||
) -> ty.Tuple[ADT | None, list[ADT]]:
|
||||
"""Parse JsonSchema object into internal DataModel"""
|
||||
results: list[ADT] = []
|
||||
res = self.parse_schema(
|
||||
schema, results, ignore_read_only=ignore_read_only
|
||||
)
|
||||
res = self.parse_schema(schema, results, ignore_read_only=ignore_read_only)
|
||||
return (res, results)
|
||||
|
||||
def parse_schema(
|
||||
@ -409,9 +407,7 @@ class JsonSchemaParser:
|
||||
if pattern_props and not additional_properties_type:
|
||||
# `"type": "object", "pattern_properties": ...`
|
||||
if len(list(pattern_props.values())) == 1:
|
||||
obj = Dictionary(
|
||||
value_type=list(pattern_props.values())[0]
|
||||
)
|
||||
obj = Dictionary(value_type=list(pattern_props.values())[0])
|
||||
else:
|
||||
obj = Struct(pattern_properties=pattern_props)
|
||||
elif not pattern_props and additional_properties_type:
|
||||
@ -452,15 +448,9 @@ class JsonSchemaParser:
|
||||
|
||||
if obj:
|
||||
obj.description = schema.get("description")
|
||||
if (
|
||||
obj.reference
|
||||
and f"{obj.reference.name}{obj.reference.type}"
|
||||
in [
|
||||
f"{x.reference.name}{x.reference.type}"
|
||||
for x in results
|
||||
if x.reference
|
||||
]
|
||||
):
|
||||
if obj.reference and f"{obj.reference.name}{obj.reference.type}" in [
|
||||
f"{x.reference.name}{x.reference.type}" for x in results if x.reference
|
||||
]:
|
||||
if obj.reference.__hash__() in [
|
||||
x.reference.__hash__() for x in results if x.reference
|
||||
]:
|
||||
@ -476,9 +466,9 @@ class JsonSchemaParser:
|
||||
if parent and name:
|
||||
new_name = parent.name + "_" + name
|
||||
|
||||
if Reference(
|
||||
name=new_name, type=obj.reference.type
|
||||
) in [x.reference for x in results]:
|
||||
if Reference(name=new_name, type=obj.reference.type) in [
|
||||
x.reference for x in results
|
||||
]:
|
||||
raise NotImplementedError
|
||||
else:
|
||||
obj.reference.name = new_name
|
||||
@ -716,17 +706,17 @@ class OpenAPISchemaParser(JsonSchemaParser):
|
||||
# Param type can be anything. Process supported combinations first
|
||||
if param_location == "query" and param_name == "limit":
|
||||
dt = ConstraintInteger(minimum=0)
|
||||
elif param_location == "query" and sorted(
|
||||
["string", "boolean"]
|
||||
) == sorted(param_typ):
|
||||
elif param_location == "query" and sorted(["string", "boolean"]) == sorted(
|
||||
param_typ
|
||||
):
|
||||
dt = PrimitiveBoolean()
|
||||
elif param_location == "query" and sorted(
|
||||
["string", "integer"]
|
||||
) == sorted(param_typ):
|
||||
elif param_location == "query" and sorted(["string", "integer"]) == sorted(
|
||||
param_typ
|
||||
):
|
||||
dt = ConstraintInteger(**param_schema)
|
||||
elif param_location == "query" and sorted(
|
||||
["string", "number"]
|
||||
) == sorted(param_typ):
|
||||
elif param_location == "query" and sorted(["string", "number"]) == sorted(
|
||||
param_typ
|
||||
):
|
||||
dt = ConstraintNumber(**param_schema)
|
||||
|
||||
if isinstance(dt, ADT):
|
||||
@ -738,9 +728,7 @@ class OpenAPISchemaParser(JsonSchemaParser):
|
||||
is_flag: bool = False
|
||||
os_ext = schema.get("x-openstack", {})
|
||||
if not isinstance(os_ext, dict):
|
||||
raise RuntimeError(
|
||||
f"x-openstack must be a dictionary inside {schema}"
|
||||
)
|
||||
raise RuntimeError(f"x-openstack must be a dictionary inside {schema}")
|
||||
if "is-flag" in os_ext:
|
||||
is_flag = os_ext["is-flag"]
|
||||
|
||||
|
@ -150,9 +150,7 @@ class OpenStackServerSourceBase:
|
||||
if os_ext == {}:
|
||||
v.openstack = None
|
||||
|
||||
def _process_route(
|
||||
self, route, openapi_spec, ver_prefix=None, framework=None
|
||||
):
|
||||
def _process_route(self, route, openapi_spec, ver_prefix=None, framework=None):
|
||||
# Placement exposes "action" as controller in route defaults, all others - "controller"
|
||||
if not ("controller" in route.defaults or "action" in route.defaults):
|
||||
return
|
||||
@ -176,17 +174,11 @@ class OpenStackServerSourceBase:
|
||||
|
||||
# if "method" not in route.conditions:
|
||||
# raise RuntimeError("Method not set for %s", route)
|
||||
method = (
|
||||
route.conditions.get("method", "GET")[0]
|
||||
if route.conditions
|
||||
else "GET"
|
||||
)
|
||||
method = route.conditions.get("method", "GET")[0] if route.conditions else "GET"
|
||||
|
||||
controller = route.defaults.get("controller")
|
||||
action = route.defaults.get("action")
|
||||
logging.info(
|
||||
"Path: %s; method: %s; operation: %s", path, method, action
|
||||
)
|
||||
logging.info("Path: %s; method: %s; operation: %s", path, method, action)
|
||||
|
||||
versioned_methods = {}
|
||||
controller_actions = {}
|
||||
@ -239,9 +231,7 @@ class OpenStackServerSourceBase:
|
||||
for path_element in path_elements:
|
||||
if "{" in path_element:
|
||||
param_name = path_element.strip("{}")
|
||||
global_param_name = (
|
||||
"_".join(path_resource_names) + f"_{param_name}"
|
||||
)
|
||||
global_param_name = "_".join(path_resource_names) + f"_{param_name}"
|
||||
|
||||
param_ref_name = self._get_param_ref(
|
||||
openapi_spec,
|
||||
@ -251,9 +241,7 @@ class OpenStackServerSourceBase:
|
||||
path=path,
|
||||
)
|
||||
# Ensure reference to the param is in the path_params
|
||||
if param_ref_name not in [
|
||||
k.ref for k in [p for p in path_params]
|
||||
]:
|
||||
if param_ref_name not in [k.ref for k in [p for p in path_params]]:
|
||||
path_params.append(ParameterSchema(ref=param_ref_name))
|
||||
# Cleanup path_resource_names
|
||||
# if len(path_resource_names) > 0 and VERSION_RE.match(path_resource_names[0]):
|
||||
@ -275,8 +263,7 @@ class OpenStackServerSourceBase:
|
||||
operation_id = re.sub(
|
||||
r"^(/?v[0-9.]*/)",
|
||||
"",
|
||||
"/".join([x.strip("{}") for x in path_elements])
|
||||
+ f":{method.lower()}", # noqa
|
||||
"/".join([x.strip("{}") for x in path_elements]) + f":{method.lower()}", # noqa
|
||||
)
|
||||
|
||||
if action in versioned_methods:
|
||||
@ -376,9 +363,7 @@ class OpenStackServerSourceBase:
|
||||
for action, op_name in controller_actions.items():
|
||||
logging.info("Action %s: %s", action, op_name)
|
||||
(start_version, end_version) = (None, None)
|
||||
action_impls: list[tuple[Callable, str | None, str | None]] = (
|
||||
[]
|
||||
)
|
||||
action_impls: list[tuple[Callable, str | None, str | None]] = []
|
||||
if isinstance(op_name, str):
|
||||
# wsgi action value is a string
|
||||
if op_name in versioned_methods:
|
||||
@ -391,9 +376,7 @@ class OpenStackServerSourceBase:
|
||||
ver_method.end_version,
|
||||
)
|
||||
)
|
||||
logging.info(
|
||||
"Versioned action %s", ver_method.func
|
||||
)
|
||||
logging.info("Versioned action %s", ver_method.func)
|
||||
elif hasattr(contr, op_name):
|
||||
# ACTION with no version bounds
|
||||
func = getattr(contr, op_name)
|
||||
@ -430,9 +413,7 @@ class OpenStackServerSourceBase:
|
||||
ver_method.end_version,
|
||||
)
|
||||
)
|
||||
logging.info(
|
||||
"Versioned action %s", ver_method.func
|
||||
)
|
||||
logging.info("Versioned action %s", ver_method.func)
|
||||
elif slf and key:
|
||||
vm = getattr(slf, "versioned_methods", None)
|
||||
if vm and key in vm:
|
||||
@ -450,9 +431,7 @@ class OpenStackServerSourceBase:
|
||||
ver_method.end_version,
|
||||
)
|
||||
)
|
||||
logging.info(
|
||||
"Versioned action %s", ver_method.func
|
||||
)
|
||||
logging.info("Versioned action %s", ver_method.func)
|
||||
else:
|
||||
action_impls.append((op_name, None, None))
|
||||
|
||||
@ -595,9 +574,7 @@ class OpenStackServerSourceBase:
|
||||
if operation_spec.description:
|
||||
# Reading spec from yaml file it was converted back to regular
|
||||
# string. Therefore need to force it back to Literal block.
|
||||
operation_spec.description = LiteralScalarString(
|
||||
operation_spec.description
|
||||
)
|
||||
operation_spec.description = LiteralScalarString(operation_spec.description)
|
||||
|
||||
action_name = None
|
||||
query_params_versions = []
|
||||
@ -614,22 +591,13 @@ class OpenStackServerSourceBase:
|
||||
if not (
|
||||
"min-ver" in operation_spec.openstack
|
||||
and tuple(
|
||||
[
|
||||
int(x)
|
||||
for x in operation_spec.openstack["min-ver"].split(".")
|
||||
]
|
||||
[int(x) for x in operation_spec.openstack["min-ver"].split(".")]
|
||||
)
|
||||
< (self._api_ver(start_version))
|
||||
):
|
||||
operation_spec.openstack["min-ver"] = (
|
||||
start_version.get_string()
|
||||
)
|
||||
operation_spec.openstack["min-ver"] = start_version.get_string()
|
||||
|
||||
if (
|
||||
mode != "action"
|
||||
and end_version
|
||||
and self._api_ver_major(end_version)
|
||||
):
|
||||
if mode != "action" and end_version and self._api_ver_major(end_version):
|
||||
if self._api_ver_major(end_version) == 0:
|
||||
operation_spec.openstack.pop("max-ver", None)
|
||||
operation_spec.deprecated = None
|
||||
@ -641,18 +609,11 @@ class OpenStackServerSourceBase:
|
||||
if not (
|
||||
"max-ver" in operation_spec.openstack
|
||||
and tuple(
|
||||
[
|
||||
int(x)
|
||||
for x in operation_spec.openstack["max-ver"].split(
|
||||
"."
|
||||
)
|
||||
]
|
||||
[int(x) for x in operation_spec.openstack["max-ver"].split(".")]
|
||||
)
|
||||
> self._api_ver(end_version)
|
||||
):
|
||||
operation_spec.openstack["max-ver"] = (
|
||||
end_version.get_string()
|
||||
)
|
||||
operation_spec.openstack["max-ver"] = end_version.get_string()
|
||||
|
||||
action_name = getattr(func, "wsgi_action", None)
|
||||
if action_name:
|
||||
@ -708,9 +669,7 @@ class OpenStackServerSourceBase:
|
||||
if query_params_versions:
|
||||
so = sorted(
|
||||
query_params_versions,
|
||||
key=lambda d: (
|
||||
tuple(map(int, d[1].split("."))) if d[1] else (0, 0)
|
||||
),
|
||||
key=lambda d: (tuple(map(int, d[1].split("."))) if d[1] else (0, 0)),
|
||||
)
|
||||
for data, min_ver, max_ver in so:
|
||||
self.process_query_parameters(
|
||||
@ -770,9 +729,7 @@ class OpenStackServerSourceBase:
|
||||
"".join([x.title() for x in path_resource_names])
|
||||
+ (
|
||||
operation_name.replace("index", "list").title()
|
||||
if not path_resource_names[-1].endswith(
|
||||
operation_name
|
||||
)
|
||||
if not path_resource_names[-1].endswith(operation_name)
|
||||
else ""
|
||||
)
|
||||
+ "Response"
|
||||
@ -805,24 +762,20 @@ class OpenStackServerSourceBase:
|
||||
curr_oneOf = curr_schema.oneOf
|
||||
curr_ref = curr_schema.ref
|
||||
if curr_oneOf:
|
||||
if schema_ref not in [
|
||||
x["$ref"] for x in curr_oneOf
|
||||
]:
|
||||
if schema_ref not in [x["$ref"] for x in curr_oneOf]:
|
||||
curr_oneOf.append({"$ref": schema_ref})
|
||||
elif curr_ref and curr_ref != schema_ref:
|
||||
rsp["content"]["application/json"][
|
||||
"schema"
|
||||
] = TypeSchema(
|
||||
oneOf=[
|
||||
{"$ref": curr_ref},
|
||||
{"$ref": schema_ref},
|
||||
]
|
||||
rsp["content"]["application/json"]["schema"] = (
|
||||
TypeSchema(
|
||||
oneOf=[
|
||||
{"$ref": curr_ref},
|
||||
{"$ref": schema_ref},
|
||||
]
|
||||
)
|
||||
)
|
||||
else:
|
||||
rsp["content"] = {
|
||||
"application/json": {
|
||||
"schema": {"$ref": schema_ref}
|
||||
}
|
||||
"application/json": {"schema": {"$ref": schema_ref}}
|
||||
}
|
||||
|
||||
# Ensure operation tags are existing
|
||||
@ -830,9 +783,7 @@ class OpenStackServerSourceBase:
|
||||
if tag not in [x["name"] for x in openapi_spec.tags]:
|
||||
openapi_spec.tags.append({"name": tag})
|
||||
|
||||
self._post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path=path
|
||||
)
|
||||
self._post_process_operation_hook(openapi_spec, operation_spec, path=path)
|
||||
|
||||
def _post_process_operation_hook(
|
||||
self, openapi_spec, operation_spec, path: str | None = None
|
||||
@ -867,9 +818,7 @@ class OpenStackServerSourceBase:
|
||||
# Nova added empty params since it was never validating them. Skip
|
||||
param_attrs["schema"] = TypeSchema(type="string")
|
||||
elif spec["type"] == "array":
|
||||
param_attrs["schema"] = TypeSchema(
|
||||
**copy.deepcopy(spec["items"])
|
||||
)
|
||||
param_attrs["schema"] = TypeSchema(**copy.deepcopy(spec["items"]))
|
||||
else:
|
||||
param_attrs["schema"] = TypeSchema(**copy.deepcopy(spec))
|
||||
param_attrs["description"] = spec.get("description")
|
||||
@ -888,14 +837,10 @@ class OpenStackServerSourceBase:
|
||||
**param_attrs,
|
||||
)
|
||||
if ref_name not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(
|
||||
ParameterSchema(ref=ref_name)
|
||||
)
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref_name))
|
||||
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Query parameters %s is not an object as expected" % obj
|
||||
)
|
||||
raise RuntimeError("Query parameters %s is not an object as expected" % obj)
|
||||
|
||||
def process_body_parameters(
|
||||
self,
|
||||
@ -918,9 +863,7 @@ class OpenStackServerSourceBase:
|
||||
if action_name:
|
||||
path_resource_names.append(action_name)
|
||||
|
||||
cont_schema_name = (
|
||||
"".join([x.title() for x in path_resource_names]) + "Request"
|
||||
)
|
||||
cont_schema_name = "".join([x.title() for x in path_resource_names]) + "Request"
|
||||
cont_schema = None
|
||||
|
||||
if body_schemas is not UNSET and len(body_schemas) == 1:
|
||||
@ -929,9 +872,7 @@ class OpenStackServerSourceBase:
|
||||
if True: # body_schemas[0] is not UNSET:
|
||||
if cont_schema_name in openapi_spec.components.schemas:
|
||||
# if we have already oneOf - add there
|
||||
cont_schema = openapi_spec.components.schemas[
|
||||
cont_schema_name
|
||||
]
|
||||
cont_schema = openapi_spec.components.schemas[cont_schema_name]
|
||||
if cont_schema.oneOf and body_schemas[0] not in [
|
||||
x["$ref"] for x in cont_schema.oneOf
|
||||
]:
|
||||
@ -951,9 +892,7 @@ class OpenStackServerSourceBase:
|
||||
)
|
||||
cont_schema = openapi_spec.components.schemas.setdefault(
|
||||
cont_schema_name,
|
||||
TypeSchema(
|
||||
oneOf=[], openstack={"discriminator": "microversion"}
|
||||
),
|
||||
TypeSchema(oneOf=[], openstack={"discriminator": "microversion"}),
|
||||
)
|
||||
# Add new refs to the container oneOf if they are not already
|
||||
# there
|
||||
@ -1010,9 +949,7 @@ class OpenStackServerSourceBase:
|
||||
js_content = op_body.setdefault(mime_type, {})
|
||||
body_schema = js_content.setdefault("schema", {})
|
||||
one_of = body_schema.setdefault("oneOf", [])
|
||||
if schema_ref and schema_ref not in [
|
||||
x.get("$ref") for x in one_of
|
||||
]:
|
||||
if schema_ref and schema_ref not in [x.get("$ref") for x in one_of]:
|
||||
one_of.append({"$ref": schema_ref})
|
||||
os_ext = body_schema.setdefault("x-openstack", {})
|
||||
os_ext["discriminator"] = "action"
|
||||
@ -1022,13 +959,11 @@ class OpenStackServerSourceBase:
|
||||
op_body = operation_spec.requestBody.setdefault("content", {})
|
||||
js_content = op_body.setdefault(mime_type, {})
|
||||
body_schema = js_content.setdefault("schema", {})
|
||||
operation_spec.requestBody["content"][mime_type]["schema"] = (
|
||||
TypeSchema(ref=schema_ref)
|
||||
operation_spec.requestBody["content"][mime_type]["schema"] = TypeSchema(
|
||||
ref=schema_ref
|
||||
)
|
||||
|
||||
def _sanitize_schema(
|
||||
self, schema, *, start_version=None, end_version=None
|
||||
):
|
||||
def _sanitize_schema(self, schema, *, start_version=None, end_version=None):
|
||||
"""Various schemas are broken in various ways"""
|
||||
|
||||
if isinstance(schema, dict):
|
||||
@ -1052,11 +987,7 @@ class OpenStackServerSourceBase:
|
||||
if typ == "array" and "additionalItems" in v:
|
||||
# additionalItems have nothing to do under the type array (create servergroup)
|
||||
schema.properties[k].pop("additionalItems")
|
||||
if (
|
||||
typ == "array"
|
||||
and "items" in v
|
||||
and isinstance(v["items"], list)
|
||||
):
|
||||
if typ == "array" and "items" in v and isinstance(v["items"], list):
|
||||
# server_group create - type array "items" is a dict and not list
|
||||
# NOTE: server_groups recently changed to "prefixItems",
|
||||
# so this may be not necessary anymore
|
||||
@ -1092,9 +1023,7 @@ class OpenStackServerSourceBase:
|
||||
else:
|
||||
os_ext = None
|
||||
# Ensure global parameter is present
|
||||
param = ParameterSchema(
|
||||
location=param_location, name=param_name, **param_attrs
|
||||
)
|
||||
param = ParameterSchema(location=param_location, name=param_name, **param_attrs)
|
||||
if param_location == "path":
|
||||
param.required = True
|
||||
if not param.description and path:
|
||||
@ -1125,8 +1054,7 @@ class OpenStackServerSourceBase:
|
||||
if (
|
||||
old_max_ver
|
||||
and max_ver
|
||||
and tuple(old_max_ver.split("."))
|
||||
> tuple(max_ver.split("."))
|
||||
and tuple(old_max_ver.split(".")) > tuple(max_ver.split("."))
|
||||
):
|
||||
# Existing param has max_ver higher then what we have now. Keep old value
|
||||
os_ext["max_ver"] = old_max_ver
|
||||
@ -1146,9 +1074,7 @@ class OpenStackServerSourceBase:
|
||||
action_name=None,
|
||||
) -> tuple[str | None, str | None]:
|
||||
if schema_def is UNSET:
|
||||
logging.warn(
|
||||
"No Schema definition for %s[%s] is known", name, action_name
|
||||
)
|
||||
logging.warn("No Schema definition for %s[%s] is known", name, action_name)
|
||||
# Create dummy schema since we got no data for it
|
||||
schema_def = {
|
||||
"type": "object",
|
||||
@ -1230,9 +1156,7 @@ class OpenStackServerSourceBase:
|
||||
if isinstance(expected_errors, list):
|
||||
expected_errors = [
|
||||
str(x)
|
||||
for x in filter(
|
||||
lambda x: isinstance(x, int), expected_errors
|
||||
)
|
||||
for x in filter(lambda x: isinstance(x, int), expected_errors)
|
||||
]
|
||||
elif isinstance(expected_errors, int):
|
||||
expected_errors = [str(expected_errors)]
|
||||
@ -1254,21 +1178,15 @@ class OpenStackServerSourceBase:
|
||||
typ_name = (
|
||||
"".join([x.title() for x in path_resource_names])
|
||||
+ func.__name__.title()
|
||||
+ (
|
||||
f"_{min_ver.replace('.', '')}"
|
||||
if min_ver
|
||||
else ""
|
||||
)
|
||||
+ (f"_{min_ver.replace('.', '')}" if min_ver else "")
|
||||
)
|
||||
comp_schema = (
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
typ_name,
|
||||
self._sanitize_schema(
|
||||
copy.deepcopy(obj),
|
||||
start_version=start_version,
|
||||
end_version=end_version,
|
||||
),
|
||||
)
|
||||
comp_schema = openapi_spec.components.schemas.setdefault(
|
||||
typ_name,
|
||||
self._sanitize_schema(
|
||||
copy.deepcopy(obj),
|
||||
start_version=start_version,
|
||||
end_version=end_version,
|
||||
),
|
||||
)
|
||||
|
||||
if min_ver:
|
||||
@ -1378,9 +1296,7 @@ def _convert_wsme_to_jsonschema(body_spec):
|
||||
elif wtypes.isdict(body_spec):
|
||||
res = {
|
||||
"type": "object",
|
||||
"additionalProperties": _convert_wsme_to_jsonschema(
|
||||
body_spec.value_type
|
||||
),
|
||||
"additionalProperties": _convert_wsme_to_jsonschema(body_spec.value_type),
|
||||
}
|
||||
elif wtypes.isusertype(body_spec):
|
||||
basetype = body_spec.basetype
|
||||
@ -1396,10 +1312,7 @@ def _convert_wsme_to_jsonschema(body_spec):
|
||||
res = {"type": "boolean"}
|
||||
elif body_spec is float:
|
||||
res = {"type": "number", "format": "float"}
|
||||
elif (
|
||||
isinstance(body_spec, wtypes.dt_types)
|
||||
or body_spec is datetime.datetime
|
||||
):
|
||||
elif isinstance(body_spec, wtypes.dt_types) or body_spec is datetime.datetime:
|
||||
res = {"type": "string", "format": "date-time"}
|
||||
else:
|
||||
raise RuntimeError("Unsupported object %s" % body_spec)
|
||||
|
@ -148,9 +148,7 @@ class CinderV3Generator(OpenStackServerSourceBase):
|
||||
|
||||
# Set global parameters
|
||||
for name, definition in volume.VOLUME_PARAMETERS.items():
|
||||
openapi_spec.components.parameters[name] = ParameterSchema(
|
||||
**definition
|
||||
)
|
||||
openapi_spec.components.parameters[name] = ParameterSchema(**definition)
|
||||
|
||||
for route in self.router.map.matchlist:
|
||||
# if route.routepath.startswith("/{project"):
|
||||
@ -158,9 +156,7 @@ class CinderV3Generator(OpenStackServerSourceBase):
|
||||
if route.routepath.endswith(".:(format)"):
|
||||
continue
|
||||
|
||||
if route.routepath.startswith(
|
||||
"/extensions"
|
||||
) or route.routepath.startswith(
|
||||
if route.routepath.startswith("/extensions") or route.routepath.startswith(
|
||||
"/{project_id:[0-9a-f\-]+}/extensions"
|
||||
):
|
||||
if route.defaults.get("action") != "index":
|
||||
|
@ -104,9 +104,7 @@ ATTACHMENTS_DETAIL_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId in [
|
||||
|
@ -253,9 +253,7 @@ BACKUP_RECORD_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId in [
|
||||
@ -265,9 +263,7 @@ def _post_process_operation_hook(
|
||||
"backups/detail:get",
|
||||
]:
|
||||
for key, val in BACKUP_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -288,9 +284,7 @@ def _get_schema_ref(
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "BackupsListResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**BACKUPS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**BACKUPS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in [
|
||||
"BackupsCreateResponse",
|
||||
|
@ -215,9 +215,7 @@ CLUSTERS_LIST_DETAIL_PARAMETERS: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId in [
|
||||
@ -225,9 +223,7 @@ def _post_process_operation_hook(
|
||||
"clusters:get",
|
||||
]:
|
||||
for key, val in CLUSTERS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -237,9 +233,7 @@ def _post_process_operation_hook(
|
||||
"clusters/detail:get",
|
||||
]:
|
||||
for key, val in CLUSTERS_LIST_DETAIL_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -260,9 +254,7 @@ def _get_schema_ref(
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "ClustersListResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**CLUSTERS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**CLUSTERS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "ClusterUpdateRequest":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
|
@ -50,9 +50,7 @@ EXTENSIONS_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId in [
|
||||
|
@ -207,9 +207,7 @@ GROUP_REPLICATION_TARGETS_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId in [
|
||||
@ -219,9 +217,7 @@ def _post_process_operation_hook(
|
||||
"groups/detail:get",
|
||||
]:
|
||||
for key, val in GROUP_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -242,9 +238,7 @@ def _get_schema_ref(
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "GroupsListResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**GROUPS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**GROUPS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in [
|
||||
"GroupsCreateResponse",
|
||||
|
@ -147,9 +147,7 @@ GROUP_SNAPSHOT_CONTAINER_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -159,9 +157,7 @@ def _post_process_operation_hook(
"group_snapshots/detail:get",
]:
for key, val in GROUP_SNAPSHOT_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -114,9 +114,7 @@ GROUP_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -126,9 +124,7 @@ def _post_process_operation_hook(
"group_types/detail:get",
]:
for key, val in GROUP_TYPE_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -102,9 +102,7 @@ def _get_schema_ref(
mime_type: str = "application/json"
ref: str
if name == "LimitsListResponse":
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**LIMITS_SCHEMA)
)
openapi_spec.components.schemas.setdefault(name, TypeSchema(**LIMITS_SCHEMA))
ref = f"#/components/schemas/{name}"
else:
return (None, None, False)
@ -88,9 +88,7 @@ MESSAGE_CONTAINER_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -120,9 +118,7 @@ def _get_schema_ref(
mime_type: str = "application/json"
ref: str
if name == "MessagesListResponse":
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**MESSAGES_SCHEMA)
)
openapi_spec.components.schemas.setdefault(name, TypeSchema(**MESSAGES_SCHEMA))
ref = f"#/components/schemas/{name}"
if name == "MessageShowResponse":
openapi_spec.components.schemas.setdefault(
@ -120,9 +120,7 @@ QOS_SPEC_LIST_PARAMETERS: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId

@ -131,9 +129,7 @@ def _post_process_operation_hook(
"qos-specs:get",
]:
for key, val in QOS_SPEC_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -156,9 +156,7 @@ SNAPSHOTS_DETAIL_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -194,9 +192,7 @@ def _get_schema_ref(
ref: str
# ### Snapshot
if name == "SnapshotsListResponse":
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**SNAPSHOTS_SCHEMA)
)
openapi_spec.components.schemas.setdefault(name, TypeSchema(**SNAPSHOTS_SCHEMA))
ref = f"#/components/schemas/{name}"
if name == "SnapshotsDetailResponse":
openapi_spec.components.schemas.setdefault(
@ -15,7 +15,6 @@ from typing import Any

from cinder.api.schemas import snapshot_manage as cinder_snapshot_manage

from codegenerator.openapi.cinder_schemas import common
from codegenerator.openapi.cinder_schemas import snapshot

from codegenerator.common.schema import ParameterSchema
@ -109,9 +108,7 @@ MANAGEABLE_SNAPSHOT_CREATE_REQUEST_SCHEMA: dict[str, Any] = copy.deepcopy(
MANAGEABLE_SNAPSHOT_CREATE_REQUEST_SCHEMA["properties"].pop("type", None)


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -388,9 +388,7 @@ VOLUME_UPLOAD_IMAGE_RESPONSE_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -437,9 +435,7 @@ def _get_schema_ref(
ref: str
# ### Volume
if name == "VolumesListResponse":
openapi_spec.components.schemas.setdefault(
name, TypeSchema(**VOLUMES_SCHEMA)
)
openapi_spec.components.schemas.setdefault(name, TypeSchema(**VOLUMES_SCHEMA))
ref = f"#/components/schemas/{name}"
if name == "VolumesDetailResponse":
openapi_spec.components.schemas.setdefault(
@ -87,9 +87,7 @@ MANAGEABLE_VOLUMES_DETAIL_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId
if operationId in [
@ -12,7 +12,6 @@
#
from typing import Any

from cinder.api.validation import parameter_types

from codegenerator.common.schema import ParameterSchema
from codegenerator.common.schema import TypeSchema
@ -99,9 +98,7 @@ VOLUME_TRANSFER_CONTAINER_SCHEMA: dict[str, Any] = {

VOLUME_TRANSFERS_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"transfers": {"type": "array", "items": VOLUME_TRANSFER_SCHEMA}
},
"properties": {"transfers": {"type": "array", "items": VOLUME_TRANSFER_SCHEMA}},
}

OS_VOLUME_TRANSFERS_DETAIL_SCHEMA: dict[str, Any] = {
@ -173,9 +170,7 @@ VOLUME_TRANSFER_LIST_PARAMETERS: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId

@ -184,9 +179,7 @@ def _post_process_operation_hook(
"volume-transfers/detail:get",
]:
for key, val in VOLUME_TRANSFER_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -198,9 +191,7 @@ def _post_process_operation_hook(
# structure and just copy single param.
key = "transfer_all_tenants"
val = VOLUME_TRANSFER_LIST_PARAMETERS[key]
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -60,9 +60,7 @@ VOLUME_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {

VOLUME_TYPES_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"volume_types": {"type": "array", "items": VOLUME_TYPE_SCHEMA}
},
"properties": {"volume_types": {"type": "array", "items": VOLUME_TYPE_SCHEMA}},
}

VOLUME_TYPE_LIST_PARAMETERS: dict[str, Any] = {
@ -228,9 +226,7 @@ DEFAULT_TYPE_SCHEMA: dict[str, Any] = {

DEFAULT_TYPES_SCHEMA: dict[str, Any] = {
"type": "object",
"properties": {
"default_types": {"type": "array", "items": DEFAULT_TYPE_SCHEMA}
},
"properties": {"default_types": {"type": "array", "items": DEFAULT_TYPE_SCHEMA}},
}

DEFAULT_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
@ -239,9 +235,7 @@ DEFAULT_TYPE_CONTAINER_SCHEMA: dict[str, Any] = {
}


def _post_process_operation_hook(
openapi_spec, operation_spec, path: str | None = None
):
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
"""Hook to allow service specific generator to modify details"""
operationId = operation_spec.operationId

@ -249,9 +243,7 @@ def _post_process_operation_hook(
"project_id/types:get",
]:
for key, val in VOLUME_TYPE_LIST_PARAMETERS.items():
openapi_spec.components.parameters.setdefault(
key, ParameterSchema(**val)
)
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
ref = f"#/components/parameters/{key}"
if ref not in [x.ref for x in operation_spec.parameters]:
operation_spec.parameters.append(ParameterSchema(ref=ref))
@ -91,9 +91,7 @@ IMAGE_PARAMETERS = {
"status": {
"in": "query",
"name": "status",
"description": LiteralScalarString(
"Filters the response by an image status."
),
"description": LiteralScalarString("Filters the response by an image status."),
"schema": {"type": "string"},
},
"size_min": {
@ -311,9 +309,7 @@ class GlanceGenerator(OpenStackServerSourceBase):

# Set global headers and parameters
for name, definition in IMAGE_PARAMETERS.items():
openapi_spec.components.parameters[name] = ParameterSchema(
**definition
)
openapi_spec.components.parameters[name] = ParameterSchema(**definition)
for name, definition in IMAGE_HEADERS.items():
openapi_spec.components.headers[name] = HeaderSchema(**definition)

@ -376,9 +372,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
key = "OpenStack-image-store-ids"
ref = f"#/components/headers/{key}"
operation_spec.responses["201"].setdefault("headers", {})
operation_spec.responses["201"]["headers"].update(
{key: {"$ref": ref}}
)
operation_spec.responses["201"]["headers"].update({key: {"$ref": ref}})

elif operationId == "images/image_id/file:put":
for ref in [
@ -439,9 +433,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
"type": "array",
"items": {
"type": "object",
"properties": copy.deepcopy(
schema_def.properties
),
"properties": copy.deepcopy(schema_def.properties),
},
},
},
@ -459,9 +451,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
glance_schema.CollectionSchema(
"tasks", tasks.get_task_schema()
),
glance_schema.CollectionSchema("tasks", tasks.get_task_schema()),
name,
),
)
@ -480,9 +470,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
"uri": {"type": "string"},
"glance_image_id": {"type": "string"},
"glance_region": {"type": "string"},
"glance_service_interface": {
"type": "string"
},
"glance_service_interface": {"type": "string"},
},
},
"stores": {
@ -508,9 +496,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
elif name == "ImagesMembersListResponse":
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
image_members.get_collection_schema(), name
),
self._get_glance_schema(image_members.get_collection_schema(), name),
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -609,9 +595,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
metadef_objects.get_collection_schema(), name
),
self._get_glance_schema(metadef_objects.get_collection_schema(), name),
)
ref = f"#/components/schemas/{name}"
elif name in [
@ -679,9 +663,7 @@ class GlanceGenerator(OpenStackServerSourceBase):
]:
openapi_spec.components.schemas.setdefault(
name,
self._get_glance_schema(
metadef_tags.get_collection_schema(), name
),
self._get_glance_schema(metadef_tags.get_collection_schema(), name),
)
ref = f"#/components/schemas/{name}"
elif name == "ImageUpdateRequest":
@ -752,9 +734,9 @@ class GlanceGenerator(OpenStackServerSourceBase):
for field in i32_fixes:
res["properties"][field]["format"] = "int64"
elif name == "MetadefsNamespacesPropertiesListResponse":
res["properties"]["properties"]["additionalProperties"][
"type"
] = "object"
res["properties"]["properties"]["additionalProperties"]["type"] = (
"object"
)
return TypeSchema(**res)

@classmethod
@ -21,7 +21,7 @@ from codegenerator.common.schema import ParameterSchema
|
||||
from codegenerator.common.schema import PathSchema
|
||||
from codegenerator.common.schema import SpecSchema
|
||||
from codegenerator.common.schema import TypeSchema
|
||||
from codegenerator.openapi.base import OpenStackServerSourceBase, UNSET
|
||||
from codegenerator.openapi.base import OpenStackServerSourceBase
|
||||
from codegenerator.openapi.keystone_schemas import application_credential
|
||||
from codegenerator.openapi.keystone_schemas import auth
|
||||
from codegenerator.openapi.keystone_schemas import common
|
||||
@ -158,9 +158,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
self._sanitize_param_ver_info(openapi_spec, self.min_api_version)
|
||||
|
||||
if args.api_ref_src:
|
||||
merge_api_ref_doc(
|
||||
openapi_spec, args.api_ref_src, allow_strip_version=False
|
||||
)
|
||||
merge_api_ref_doc(openapi_spec, args.api_ref_src, allow_strip_version=False)
|
||||
|
||||
self.dump_openapi(openapi_spec, impl_path, args.validate)
|
||||
|
||||
@ -207,14 +205,10 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
for path_element in path_elements:
|
||||
if "{" in path_element:
|
||||
param_name = path_element.strip("{}")
|
||||
global_param_name = (
|
||||
"_".join(path_resource_names) + f"_{param_name}"
|
||||
)
|
||||
global_param_name = "_".join(path_resource_names) + f"_{param_name}"
|
||||
param_ref_name = f"#/components/parameters/{global_param_name}"
|
||||
# Ensure reference to the param is in the path_params
|
||||
if param_ref_name not in [
|
||||
k.ref for k in [p for p in path_params]
|
||||
]:
|
||||
if param_ref_name not in [k.ref for k in [p for p in path_params]]:
|
||||
path_params.append(ParameterSchema(ref=param_ref_name))
|
||||
# Ensure global parameter is present
|
||||
path_param = ParameterSchema(
|
||||
@ -228,25 +222,17 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
# We can only assume the param type. For path it is logically a string only
|
||||
path_param.type_schema = TypeSchema(type="string")
|
||||
# For non /users/{id} urls link user_id path attribute to the user resource
|
||||
if path_param.name == "user_id" and path_resource_names != [
|
||||
"users"
|
||||
]:
|
||||
if path_param.name == "user_id" and path_resource_names != ["users"]:
|
||||
if not path_param.openstack:
|
||||
path_param.openstack = {}
|
||||
path_param.openstack["resource_link"] = (
|
||||
"identity/v3/user.id"
|
||||
)
|
||||
path_param.openstack["resource_link"] = "identity/v3/user.id"
|
||||
if path_param.name == "domain_id" and path_resource_names != [
|
||||
"domains"
|
||||
]:
|
||||
if not path_param.openstack:
|
||||
path_param.openstack = {}
|
||||
path_param.openstack["resource_link"] = (
|
||||
"identity/v3/domain.id"
|
||||
)
|
||||
openapi_spec.components.parameters[global_param_name] = (
|
||||
path_param
|
||||
)
|
||||
path_param.openstack["resource_link"] = "identity/v3/domain.id"
|
||||
openapi_spec.components.parameters[global_param_name] = path_param
|
||||
if len(path_elements) == 0:
|
||||
path_resource_names.append("root")
|
||||
elif path_elements[-1].startswith("{"):
|
||||
@ -277,17 +263,13 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
elif path == "/v3":
|
||||
operation_id_prefix = "version"
|
||||
else:
|
||||
operation_id_prefix = "/".join(
|
||||
[x.strip("{}") for x in path_elements]
|
||||
)
|
||||
operation_id_prefix = "/".join([x.strip("{}") for x in path_elements])
|
||||
for method in route.methods:
|
||||
if method == "OPTIONS":
|
||||
# Not sure what should be done with it
|
||||
continue
|
||||
if controller:
|
||||
func = getattr(
|
||||
controller, method.replace("HEAD", "GET").lower(), None
|
||||
)
|
||||
func = getattr(controller, method.replace("HEAD", "GET").lower(), None)
|
||||
else:
|
||||
func = view
|
||||
# Set operationId
|
||||
@ -384,7 +366,6 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
response_code = None
|
||||
start_version = None
|
||||
end_version = None
|
||||
deser_schema: dict = {}
|
||||
ser_schema: dict | None = {}
|
||||
|
||||
(
|
||||
@ -405,9 +386,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
if query_params_versions:
|
||||
so = sorted(
|
||||
query_params_versions,
|
||||
key=lambda d: (
|
||||
tuple(map(int, d[1].split("."))) if d[1] else (0, 0)
|
||||
),
|
||||
key=lambda d: (tuple(map(int, d[1].split("."))) if d[1] else (0, 0)),
|
||||
)
|
||||
for data, min_ver, max_ver in so:
|
||||
self.process_query_parameters(
|
||||
@ -491,9 +470,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
operation_spec.security = []
|
||||
elif method == "GET":
|
||||
operation_spec.parameters.append(
|
||||
ParameterSchema(
|
||||
ref="#/components/parameters/X-Subject-Token"
|
||||
)
|
||||
ParameterSchema(ref="#/components/parameters/X-Subject-Token")
|
||||
)
|
||||
rsp_headers.setdefault(
|
||||
"X-Subject-Token",
|
||||
@ -505,9 +482,7 @@ class KeystoneGenerator(OpenStackServerSourceBase):
|
||||
if tag not in [x["name"] for x in openapi_spec.tags]:
|
||||
openapi_spec.tags.append({"name": tag, "description": None})
|
||||
|
||||
self._post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path=path
|
||||
)
|
||||
self._post_process_operation_hook(openapi_spec, operation_spec, path=path)
|
||||
|
||||
def _post_process_operation_hook(
|
||||
self, openapi_spec, operation_spec, path: str | None = None
|
||||
|
@ -83,9 +83,9 @@ APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA: dict[str, Any] = {
|
||||
},
|
||||
}
|
||||
# Update `secret` field
|
||||
APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA["properties"][
|
||||
"application_credential"
|
||||
]["properties"]["secret"] = {
|
||||
APPLICATION_CREDENTIAL_CREATE_RESPONSE_SCHEMA["properties"]["application_credential"][
|
||||
"properties"
|
||||
]["secret"] = {
|
||||
"type": "string",
|
||||
"description": "The secret for the application credential, either generated by the server or provided by the user. This is only ever shown once in the response to a create request. It is not stored nor ever shown again. If the secret is lost, a new application credential must be created.",
|
||||
}
|
||||
@ -110,9 +110,7 @@ APPLICATION_CREDENTIALS_LIST_PARAMETERS = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId == "users/user_id/application_credentials:get":
|
||||
@ -120,9 +118,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in APPLICATION_CREDENTIALS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
|
@ -546,9 +546,7 @@ AUTH_RECEIPT_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
|
||||
@ -563,9 +561,7 @@ def _post_process_operation_hook(
|
||||
"$ref": "#/components/headers/Openstack-Auth-Receipt"
|
||||
}
|
||||
},
|
||||
"content": {
|
||||
receipt_mime_type: {"schema": {"$ref": receipt_schema_ref}}
|
||||
},
|
||||
"content": {receipt_mime_type: {"schema": {"$ref": receipt_schema_ref}}},
|
||||
}
|
||||
|
||||
|
||||
|
@ -33,9 +33,7 @@ LINKS_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
TAG_SCHEMA: dict[str, Any] = copy.deepcopy(
|
||||
ks_schema._project_tag_name_properties
|
||||
)
|
||||
TAG_SCHEMA: dict[str, Any] = copy.deepcopy(ks_schema._project_tag_name_properties)
|
||||
|
||||
TAGS_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
|
@ -119,9 +119,7 @@ DOMAIN_LIST_PARAMETERS: dict[str, dict] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
|
||||
operationId = operation_spec.operationId
|
||||
@ -130,9 +128,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in DOMAIN_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -160,9 +156,7 @@ def _get_schema_ref(
|
||||
)
|
||||
ref = "#/components/schemas/Domain"
|
||||
elif name == "DomainsGetResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**DOMAINS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**DOMAINS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
|
||||
# Domain Config
|
||||
|
@ -90,9 +90,7 @@ ENDPOINTS_LIST_PARAMETERS = {
|
||||
},
|
||||
}
|
||||
|
||||
ENDPOINT_CREATE_SCHEMA: dict[str, Any] = copy.deepcopy(
|
||||
ENDPOINT_CONTAINER_SCHEMA
|
||||
)
|
||||
ENDPOINT_CREATE_SCHEMA: dict[str, Any] = copy.deepcopy(ENDPOINT_CONTAINER_SCHEMA)
|
||||
ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["properties"].pop("id")
|
||||
ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["required"] = [
|
||||
"interface",
|
||||
@ -101,9 +99,7 @@ ENDPOINT_CREATE_SCHEMA["properties"]["endpoint"]["required"] = [
|
||||
]
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId == "endpoints:get":
|
||||
@ -111,9 +107,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in ENDPOINTS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
|
@ -61,16 +61,12 @@ IDENTITY_PROVIDER_CONTAINER_SCHEMA: dict[str, Any] = {
|
||||
|
||||
IDENTITY_PROVIDER_CREATE_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"identity_provider": federation_schema.identity_provider_create
|
||||
},
|
||||
"properties": {"identity_provider": federation_schema.identity_provider_create},
|
||||
}
|
||||
|
||||
IDENTITY_PROVIDER_UPDATE_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"identity_provider": federation_schema.identity_provider_update
|
||||
},
|
||||
"properties": {"identity_provider": federation_schema.identity_provider_update},
|
||||
}
|
||||
|
||||
IDENTITY_PROVIDERS_SCHEMA: dict[str, Any] = {
|
||||
@ -219,22 +215,16 @@ FEDERATION_SERVICE_PROVIDERS_SCHEMA: dict[str, Any] = {
|
||||
|
||||
FEDERATION_SERVICE_PROVIDER_CREATE_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"service_provider": federation_schema.service_provider_create
|
||||
},
|
||||
"properties": {"service_provider": federation_schema.service_provider_create},
|
||||
}
|
||||
|
||||
FEDERATION_SERVICE_PROVIDER_UPDATE_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"service_provider": federation_schema.service_provider_update
|
||||
},
|
||||
"properties": {"service_provider": federation_schema.service_provider_update},
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId == "OS-FEDERATION/identity_providers:get":
|
||||
@ -242,9 +232,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in IDENTITY_PROVIDERS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
|
@ -56,26 +56,20 @@ GROUP_USERS_LIST_PARAMETERS: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
|
||||
if operationId == "groups:get":
|
||||
for key, val in GROUPS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
|
||||
elif operationId == "groups/group_id/users:get":
|
||||
for key, val in GROUP_USERS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -92,9 +86,7 @@ def _get_schema_ref(
|
||||
ref: str
|
||||
# Groups
|
||||
if name == "GroupsGetResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**GROUPS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**GROUPS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in [
|
||||
"GroupsPostRequest",
|
||||
|
@ -83,9 +83,7 @@ PROJECT_LIST_PARAMETERS = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId == "projects:get":
|
||||
@ -93,9 +91,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in PROJECT_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -124,9 +120,7 @@ def _get_schema_ref(
|
||||
)
|
||||
ref = "#/components/schemas/Project"
|
||||
elif name == "ProjectsGetResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**PROJECTS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**PROJECTS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
|
||||
# Project Tags
|
||||
|
@ -56,9 +56,7 @@ REGIONS_LIST_PARAMETERS = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId == "regions:get":
|
||||
@ -66,9 +64,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in REGIONS_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
|
@ -298,9 +298,7 @@ ROLE_ASSIGNMENT_LIST_PARAMETERS: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
|
||||
@ -309,9 +307,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in ROLE_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
@ -337,9 +333,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in ROLE_ASSIGNMENTS_QUERY_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
@ -357,9 +351,7 @@ def _get_schema_ref(
|
||||
ref: str
|
||||
# Roles
|
||||
if name == "RolesGetResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**ROLES_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**ROLES_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in [
|
||||
"RolesPostRequest",
|
||||
|
@ -64,9 +64,7 @@ SERVICES_LIST_PARAMETERS = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
if operationId == "services:get":
|
||||
@ -74,9 +72,7 @@ def _post_process_operation_hook(
|
||||
key,
|
||||
val,
|
||||
) in SERVICES_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
|
@ -99,12 +99,12 @@ USER_PWD_CHANGE_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
# Set `password` format for password change operation
|
||||
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]["password"][
|
||||
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]["password"]["format"] = (
|
||||
"password"
|
||||
)
|
||||
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"]["original_password"][
|
||||
"format"
|
||||
] = "password"
|
||||
USER_PWD_CHANGE_SCHEMA["properties"]["user"]["properties"][
|
||||
"original_password"
|
||||
]["format"] = "password"
|
||||
|
||||
USER_GROUP_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
@ -188,17 +188,13 @@ USER_PROJECTS_SCHEMA: dict[str, Any] = {
|
||||
}
|
||||
|
||||
|
||||
def _post_process_operation_hook(
|
||||
openapi_spec, operation_spec, path: str | None = None
|
||||
):
|
||||
def _post_process_operation_hook(openapi_spec, operation_spec, path: str | None = None):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
operationId = operation_spec.operationId
|
||||
|
||||
if operationId == "users:get":
|
||||
for key, val in USER_LIST_PARAMETERS.items():
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
key, ParameterSchema(**val)
|
||||
)
|
||||
openapi_spec.components.parameters.setdefault(key, ParameterSchema(**val))
|
||||
ref = f"#/components/parameters/{key}"
|
||||
if ref not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(ParameterSchema(ref=ref))
|
||||
@ -225,9 +221,7 @@ def _get_schema_ref(
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "UsersGetResponse":
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
name, TypeSchema(**USERS_SCHEMA)
|
||||
)
|
||||
openapi_spec.components.schemas.setdefault(name, TypeSchema(**USERS_SCHEMA))
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in ["UserGetResponse", "UserPostResponse", "UserPatchResponse"]:
|
||||
openapi_spec.components.schemas.setdefault(
|
||||
|
@ -38,7 +38,6 @@ class ManilaGenerator(OpenStackServerSourceBase):
|
||||
|
||||
def _generate(self, target_dir, args):
|
||||
import fixtures
|
||||
from oslo_config import cfg
|
||||
from oslo_config import fixture as config_fixture
|
||||
from oslo_concurrency import lockutils
|
||||
|
||||
@ -58,15 +57,11 @@ class ManilaGenerator(OpenStackServerSourceBase):
|
||||
lock_path = self.useFixture(fixtures.TempDir()).path
|
||||
self.fixture = self.useFixture(config_fixture.Config(lockutils.CONF))
|
||||
self.fixture.config(lock_path=lock_path, group="oslo_concurrency")
|
||||
self.fixture.config(
|
||||
disable_process_locking=True, group="oslo_concurrency"
|
||||
)
|
||||
self.fixture.config(disable_process_locking=True, group="oslo_concurrency")
|
||||
|
||||
rpc.init(CONF)
|
||||
|
||||
CONF.set_override(
|
||||
"backend_url", "file://" + lock_path, group="coordination"
|
||||
)
|
||||
CONF.set_override("backend_url", "file://" + lock_path, group="coordination")
|
||||
coordination.LOCK_COORDINATOR.start()
|
||||
|
||||
# config = cfg.ConfigOpts()
|
||||
|
@ -310,9 +310,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
name="sort_dir",
|
||||
location="query",
|
||||
description="Sort direction. This is an optional feature and may be silently ignored by the server.",
|
||||
type_schema=TypeSchema(
|
||||
type="string", enum=["asc", "desc"]
|
||||
),
|
||||
type_schema=TypeSchema(type="string", enum=["asc", "desc"]),
|
||||
),
|
||||
},
|
||||
schemas={},
|
||||
@ -365,9 +363,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
|
||||
# merge descriptions from api-ref doc
|
||||
if args.api_ref_src:
|
||||
merge_api_ref_doc(
|
||||
openapi_spec, args.api_ref_src, allow_strip_version=False
|
||||
)
|
||||
merge_api_ref_doc(openapi_spec, args.api_ref_src, allow_strip_version=False)
|
||||
|
||||
self.dump_openapi(openapi_spec, Path(impl_path), args.validate)
|
||||
|
||||
@ -382,50 +378,39 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
# continue
|
||||
# if "networks" not in route.routepath:
|
||||
# continue
|
||||
if route.routepath.endswith("/edit") or route.routepath.endswith(
|
||||
"/new"
|
||||
):
|
||||
if route.routepath.endswith("/edit") or route.routepath.endswith("/new"):
|
||||
# NEUTRON folks - please fix
|
||||
logging.warning(
|
||||
"Skipping processing %s route", route.routepath
|
||||
)
|
||||
logging.warning("Skipping processing %s route", route.routepath)
|
||||
continue
|
||||
if (
|
||||
"/qos/ports" in route.routepath
|
||||
or "/qos/networks" in route.routepath
|
||||
):
|
||||
if "/qos/ports" in route.routepath or "/qos/networks" in route.routepath:
|
||||
# NEUTRON folks - please fix
|
||||
logging.warning(
|
||||
"Skipping processing %s route", route.routepath
|
||||
)
|
||||
logging.warning("Skipping processing %s route", route.routepath)
|
||||
continue
|
||||
if (
|
||||
route.routepath.endswith("/tags")
|
||||
and route.conditions["method"][0] == "POST"
|
||||
):
|
||||
logging.warning(
|
||||
"Skipping processing POST %s route", route.routepath
|
||||
)
|
||||
logging.warning("Skipping processing POST %s route", route.routepath)
|
||||
continue
|
||||
if route.routepath.startswith("/extensions") and route.conditions[
|
||||
if route.routepath.startswith("/extensions") and route.conditions["method"][
|
||||
0
|
||||
] in ["POST", "DELETE", "PUT"]:
|
||||
continue
|
||||
if route.routepath.startswith("/availability_zones") and route.conditions[
|
||||
"method"
|
||||
][0] in ["POST", "DELETE", "PUT"]:
|
||||
continue
|
||||
if route.routepath.startswith(
|
||||
"/availability_zones"
|
||||
) and route.conditions["method"][0] in ["POST", "DELETE", "PUT"]:
|
||||
continue
|
||||
if route.routepath.startswith(
|
||||
"/availability_zones/"
|
||||
) and route.conditions["method"][0] in ["GET"]:
|
||||
if route.routepath.startswith("/availability_zones/") and route.conditions[
|
||||
"method"
|
||||
][0] in ["GET"]:
|
||||
# There is no "show" for AZ
|
||||
continue
|
||||
if route.routepath in ["/quotas/tenant", "/quotas/project"]:
|
||||
# Tenant and Project quota are not a thing
|
||||
continue
|
||||
if route.routepath == "/quotas" and route.conditions["method"][
|
||||
0
|
||||
] in ["POST"]:
|
||||
if route.routepath == "/quotas" and route.conditions["method"][0] in [
|
||||
"POST"
|
||||
]:
|
||||
# Tenant and Project quota is the same
|
||||
continue
|
||||
|
||||
@ -512,11 +497,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
|
||||
if "method" not in route.conditions:
|
||||
raise RuntimeError("Method not set for %s" % route)
|
||||
method = (
|
||||
route.conditions.get("method", "GET")[0]
|
||||
if route.conditions
|
||||
else "GET"
|
||||
)
|
||||
method = route.conditions.get("method", "GET")[0] if route.conditions else "GET"
|
||||
|
||||
wsgi_controller = controller or route.defaults["controller"]
|
||||
# collection_name = route.collection_name
|
||||
@ -542,9 +523,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
logging.warning("Skipping duplicated route %s", processed_key)
|
||||
return
|
||||
|
||||
logging.info(
|
||||
"Path: %s; method: %s; operation: %s", path, method, action
|
||||
)
|
||||
logging.info("Path: %s; method: %s; operation: %s", path, method, action)
|
||||
|
||||
# Get Path elements
|
||||
path_elements: list[str] = list(filter(None, path.split("/")))
|
||||
@ -575,19 +554,15 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
for path_element in path_elements:
|
||||
if "{" in path_element:
|
||||
param_name = path_element.strip("{}")
|
||||
global_param_name = (
|
||||
f"{global_param_name_prefix}_{param_name}".replace(
|
||||
":", "_"
|
||||
)
|
||||
global_param_name = f"{global_param_name_prefix}_{param_name}".replace(
|
||||
":", "_"
|
||||
)
|
||||
|
||||
if global_param_name == "_project_id":
|
||||
global_param_name = "project_id"
|
||||
param_ref_name = f"#/components/parameters/{global_param_name}"
|
||||
# Ensure reference to the param is in the path_params
|
||||
if param_ref_name not in [
|
||||
k.ref for k in [p for p in path_params]
|
||||
]:
|
||||
if param_ref_name not in [k.ref for k in [p for p in path_params]]:
|
||||
path_params.append(ParameterSchema(ref=param_ref_name))
|
||||
# Ensure global parameter is present
|
||||
path_param = ParameterSchema(
|
||||
@ -595,14 +570,10 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
)
|
||||
# openapi_spec.components["parameters"].setdefault(global_param_name, dict())
|
||||
if not path_param.description:
|
||||
path_param.description = (
|
||||
f"{param_name} parameter for {path} API"
|
||||
)
|
||||
path_param.description = f"{param_name} parameter for {path} API"
|
||||
# We can only assume the param type. For path it is logically a string only
|
||||
path_param.type_schema = TypeSchema(type="string")
|
||||
openapi_spec.components.parameters[global_param_name] = (
|
||||
path_param
|
||||
)
|
||||
openapi_spec.components.parameters[global_param_name] = path_param
|
||||
else:
|
||||
path_resource_names.append(path_element.replace("-", "_"))
|
||||
|
||||
@ -620,8 +591,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
operation_id = re.sub(
|
||||
r"^(/?v[0-9.]*/)",
|
||||
"",
|
||||
"/".join([x.strip("{}") for x in path_elements])
|
||||
+ f":{method.lower()}", # noqa
|
||||
"/".join([x.strip("{}") for x in path_elements]) + f":{method.lower()}", # noqa
|
||||
)
|
||||
|
||||
path_spec = openapi_spec.paths.setdefault(
|
||||
@ -705,22 +675,16 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
for field, data in attr_info.items():
|
||||
# operation_spec.setdefault("parameters", [])
|
||||
if data.get("is_filter", False):
|
||||
global_param_name = f"{collection}_{field}".replace(
|
||||
":", "_"
|
||||
)
|
||||
param_ref_name = (
|
||||
f"#/components/parameters/{global_param_name}"
|
||||
)
|
||||
global_param_name = f"{collection}_{field}".replace(":", "_")
|
||||
param_ref_name = f"#/components/parameters/{global_param_name}"
|
||||
# Ensure global parameter is present
|
||||
query_param = (
|
||||
openapi_spec.components.parameters.setdefault(
|
||||
global_param_name,
|
||||
ParameterSchema(
|
||||
location="query",
|
||||
name=field,
|
||||
type_schema=get_schema(data),
|
||||
),
|
||||
)
|
||||
query_param = openapi_spec.components.parameters.setdefault(
|
||||
global_param_name,
|
||||
ParameterSchema(
|
||||
location="query",
|
||||
name=field,
|
||||
type_schema=get_schema(data),
|
||||
),
|
||||
)
|
||||
if not query_param.description:
|
||||
query_param.description = (
|
||||
@ -751,9 +715,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
)
|
||||
query_param.style = "form"
|
||||
query_param.explode = False
|
||||
if param_ref_name not in [
|
||||
x.ref for x in operation_spec.parameters
|
||||
]:
|
||||
if param_ref_name not in [x.ref for x in operation_spec.parameters]:
|
||||
operation_spec.parameters.append(
|
||||
ParameterSchema(ref=param_ref_name)
|
||||
)
|
||||
@ -793,9 +755,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
response_code = "204"
|
||||
|
||||
if response_code:
|
||||
rsp = responses_spec.setdefault(
|
||||
response_code, dict(description="Ok")
|
||||
)
|
||||
rsp = responses_spec.setdefault(response_code, dict(description="Ok"))
|
||||
if response_code != "204" and method != "DELETE":
|
||||
# Arrange response placeholder
|
||||
schema_name = (
|
||||
@ -889,12 +849,8 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
"QuotasDefaultDefaultResponse",
|
||||
"QuotasProjectProjectResponse",
|
||||
]:
|
||||
schema.properties = {
|
||||
"quota": copy.deepcopy(neutron_schemas.QUOTA_SCHEMA)
|
||||
}
|
||||
elif name.endswith("TagUpdateRequest") or name.endswith(
|
||||
"TagUpdateResponse"
|
||||
):
|
||||
schema.properties = {"quota": copy.deepcopy(neutron_schemas.QUOTA_SCHEMA)}
|
||||
elif name.endswith("TagUpdateRequest") or name.endswith("TagUpdateResponse"):
|
||||
# PUT tag does not have request body
|
||||
return None
|
||||
|
||||
@ -929,16 +885,12 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
send_props = {}
|
||||
return_props = {}
|
||||
# Consume request name to required fields mapping
|
||||
required_fields = neutron_schemas.REQUIRED_FIELDS_MAPPING.get(
|
||||
name, []
|
||||
)
|
||||
required_fields = neutron_schemas.REQUIRED_FIELDS_MAPPING.get(name, [])
|
||||
for field, data in schema_def.items():
|
||||
js_schema = get_schema(data)
|
||||
# Dirty hacks for corrupted schemas
|
||||
if field in ["availability_zones", "tags"]:
|
||||
js_schema.update(
|
||||
{"type": "array", "items": {"type": "string"}}
|
||||
)
|
||||
js_schema.update({"type": "array", "items": {"type": "string"}})
|
||||
elif field == "revision_number":
|
||||
js_schema.update({"type": "integer"})
|
||||
elif field == "subnets":
|
||||
@ -972,10 +924,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
},
|
||||
}
|
||||
)
|
||||
elif (
|
||||
resource_key == "floatingip"
|
||||
and field == "port_forwardings"
|
||||
):
|
||||
elif resource_key == "floatingip" and field == "port_forwardings":
|
||||
js_schema.update(
|
||||
{
|
||||
"type": "array",
|
||||
@ -1065,8 +1014,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
}
|
||||
)
|
||||
elif (
|
||||
resource_key == "security_group"
|
||||
and field == "security_group_rules"
|
||||
resource_key == "security_group" and field == "security_group_rules"
|
||||
):
|
||||
js_schema.update(
|
||||
{
|
||||
@ -1137,9 +1085,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
"maxLength": 255,
|
||||
"description": "A human-readable description for the resource.",
|
||||
},
|
||||
"normalized_cidr": {
|
||||
"type": ["string", "null"]
|
||||
},
|
||||
"normalized_cidr": {"type": ["string", "null"]},
|
||||
"remote_address_group_id": {
|
||||
"type": "string",
|
||||
"description": "The remote address group UUID that is associated with this\nsecurity group rule.",
|
||||
@ -1166,9 +1112,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
"items": {
|
||||
"type": "object",
|
||||
"properties": (
|
||||
send_props
|
||||
if name.endswith("Request")
|
||||
else return_props
|
||||
send_props if name.endswith("Request") else return_props
|
||||
),
|
||||
},
|
||||
}
|
||||
@ -1179,9 +1123,7 @@ class NeutronGenerator(OpenStackServerSourceBase):
|
||||
resource_key: {
|
||||
"type": "object",
|
||||
"properties": (
|
||||
send_props
|
||||
if name.endswith("Request")
|
||||
else return_props
|
||||
send_props if name.endswith("Request") else return_props
|
||||
),
|
||||
}
|
||||
}
|
||||
@ -1345,9 +1287,7 @@ def get_schema(param_data):
|
||||
},
|
||||
}
|
||||
elif "type:list_of_any_key_specs_or_none" in validate:
|
||||
logging.warning(
|
||||
"TODO: Implement type:list_of_any_key_specs_or_none"
|
||||
)
|
||||
logging.warning("TODO: Implement type:list_of_any_key_specs_or_none")
|
||||
schema = {
|
||||
"type": "array",
|
||||
"items": {
|
||||
@ -1427,9 +1367,7 @@ def get_schema(param_data):
|
||||
elif "type:list_of_subnets_or_none" in validate:
|
||||
schema = {"type": "array", "items": {"type": "string"}}
|
||||
else:
|
||||
raise RuntimeError(
|
||||
"Unsupported type %s in %s" % (validate, param_data)
|
||||
)
|
||||
raise RuntimeError("Unsupported type %s in %s" % (validate, param_data))
|
||||
schema = {"type": "string"}
|
||||
if convert_to:
|
||||
# Nice way to get type of the field, isn't it?
|
||||
|
@ -352,15 +352,15 @@ ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = {
|
||||
ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = copy.deepcopy(
|
||||
ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
|
||||
)
|
||||
ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"][
|
||||
"properties"
|
||||
]["external_gateways"]["items"]["properties"]["network_id"]["readOnly"] = True
|
||||
ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"]["properties"][
|
||||
"external_gateways"
|
||||
]["items"]["properties"]["network_id"]["readOnly"] = True
|
||||
ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA: dict[str, Any] = copy.deepcopy(
|
||||
ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
|
||||
)
|
||||
ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"][
|
||||
"properties"
|
||||
]["external_gateways"]["items"]["properties"].pop("enable_snat")
|
||||
ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA["properties"]["router"]["properties"][
|
||||
"external_gateways"
|
||||
]["items"]["properties"].pop("enable_snat")
|
||||
|
||||
ADDRESS_GROUP_ADDRESS_SCHEMA: dict[str, Any] = {
|
||||
"type": "object",
|
||||
@ -518,18 +518,12 @@ def _get_schema_ref(
|
||||
**ROUTER_ADD_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif (
|
||||
name
|
||||
== "RoutersUpdate_External_GatewaysUpdate_External_GatewaysRequest"
|
||||
):
|
||||
elif name == "RoutersUpdate_External_GatewaysUpdate_External_GatewaysRequest":
|
||||
openapi_spec.components.schemas[name] = TypeSchema(
|
||||
**ROUTER_UPDATE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif (
|
||||
name
|
||||
== "RoutersRemove_External_GatewaysRemove_External_GatewaysRequest"
|
||||
):
|
||||
elif name == "RoutersRemove_External_GatewaysRemove_External_GatewaysRequest":
|
||||
openapi_spec.components.schemas[name] = TypeSchema(
|
||||
**ROUTER_REMOVE_EXTERNAL_GATEWAYS_REQUEST_SCHEMA
|
||||
)
|
||||
@ -558,14 +552,10 @@ def _get_schema_ref(
|
||||
ref = "#/components/schemas/Address_GroupShowResponse"
|
||||
|
||||
elif name == "AgentsL3_RoutersIndexResponse":
|
||||
openapi_spec.components.schemas[name] = TypeSchema(
|
||||
**L3_ROUTER_AGENTS_SCHEMA
|
||||
)
|
||||
openapi_spec.components.schemas[name] = TypeSchema(**L3_ROUTER_AGENTS_SCHEMA)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "AgentsL3_RoutersIndexResponse":
|
||||
openapi_spec.components.schemas[name] = TypeSchema(
|
||||
**L3_ROUTER_AGENTS_SCHEMA
|
||||
)
|
||||
openapi_spec.components.schemas[name] = TypeSchema(**L3_ROUTER_AGENTS_SCHEMA)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name == "AgentsL3_RoutersCreateRequest":
|
||||
openapi_spec.components.schemas[name] = TypeSchema(
|
||||
|
@ -226,9 +226,7 @@ class NovaGenerator(OpenStackServerSourceBase):
|
||||
]:
|
||||
schema = openapi_spec.components.schemas.setdefault(
|
||||
name,
|
||||
TypeSchema(
|
||||
**nova_schemas.SERVER_ACTION_CREATE_IMAGE_RESPONSE_SCHEMA
|
||||
),
|
||||
TypeSchema(**nova_schemas.SERVER_ACTION_CREATE_IMAGE_RESPONSE_SCHEMA),
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in [
|
||||
@ -243,9 +241,7 @@ class NovaGenerator(OpenStackServerSourceBase):
|
||||
elif name == "ServersActionOs-GetconsoleoutputResponse":
|
||||
schema = openapi_spec.components.schemas.setdefault(
|
||||
name,
|
||||
TypeSchema(
|
||||
**nova_schemas.SERVER_ACTION_GET_CONSOLE_OUTPUT_SCHEMA
|
||||
),
|
||||
TypeSchema(**nova_schemas.SERVER_ACTION_GET_CONSOLE_OUTPUT_SCHEMA),
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
elif name in [
|
||||
@ -275,9 +271,7 @@ class NovaGenerator(OpenStackServerSourceBase):
|
||||
elif name == "ServersIpShowResponse":
|
||||
schema = openapi_spec.components.schemas.setdefault(
|
||||
name,
|
||||
TypeSchema(
|
||||
maxProperties=1, **nova_schemas.SERVER_ADDRESSES_SCHEMA
|
||||
),
|
||||
TypeSchema(maxProperties=1, **nova_schemas.SERVER_ADDRESSES_SCHEMA),
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
# /servers/id/metadata
|
||||
@ -305,9 +299,7 @@ class NovaGenerator(OpenStackServerSourceBase):
|
||||
elif name == "ServersOs_Instance_ActionShowResponse":
|
||||
schema = openapi_spec.components.schemas.setdefault(
|
||||
name,
|
||||
TypeSchema(
|
||||
**nova_schemas.SERVER_INSTANCE_ACTION_CONTAINER_SCHEMA
|
||||
),
|
||||
TypeSchema(**nova_schemas.SERVER_INSTANCE_ACTION_CONTAINER_SCHEMA),
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
# /server/id/os-interface-attachment
|
||||
@ -323,9 +315,7 @@ class NovaGenerator(OpenStackServerSourceBase):
|
||||
]:
|
||||
schema = openapi_spec.components.schemas.setdefault(
|
||||
name,
|
||||
TypeSchema(
|
||||
**nova_schemas.INTERFACE_ATTACHMENT_CONTAINER_SCHEMA
|
||||
),
|
||||
TypeSchema(**nova_schemas.INTERFACE_ATTACHMENT_CONTAINER_SCHEMA),
|
||||
)
|
||||
ref = f"#/components/schemas/{name}"
|
||||
# /server/id/os-server-password
|
||||
@ -649,12 +639,8 @@ class NovaGenerator(OpenStackServerSourceBase):
|
||||
"""Hook to allow service specific generator to modify details"""
|
||||
if operation_spec.operationId == "servers/id/action:post":
|
||||
# Several server actions may return Location header
|
||||
operation_spec.responses.setdefault(
|
||||
"202", {"description": "Accepted"}
|
||||
)
|
||||
headers_202 = operation_spec.responses["202"].setdefault(
|
||||
"headers", {}
|
||||
)
|
||||
operation_spec.responses.setdefault("202", {"description": "Accepted"})
|
||||
headers_202 = operation_spec.responses["202"].setdefault("headers", {})
|
||||
headers_202.setdefault(
|
||||
"Location",
|
||||
HeaderSchema(
|
||||
|
@ -192,9 +192,7 @@ FLAVORS_LIST_SCHEMA: dict[str, Any] = {
FLAVORS_LIST_DETAIL_SCHEMA: dict[str, Any] = {
"description": "Detailed flavors list response",
"type": "object",
"properties": {
"flavors": {"type": "array", "items": copy.deepcopy(FLAVOR_SCHEMA)}
},
"properties": {"flavors": {"type": "array", "items": copy.deepcopy(FLAVOR_SCHEMA)}},
}

FLAVOR_ACCESS_SCHEMA: dict[str, Any] = {
@ -469,16 +467,16 @@ REMOTE_CONSOLE_SCHEMA: dict[str, Any] = {
"properties": {
"protocol": {
"type": "string",
"enum": remote_consoles.create_v28["properties"][
"remote_console"
]["properties"]["protocol"]["enum"],
"enum": remote_consoles.create_v28["properties"]["remote_console"][
"properties"
]["protocol"]["enum"],
"description": "The protocol of remote console. The valid values are vnc, spice, rdp, serial and mks. The protocol mks is added since Microversion 2.8.",
},
"type": {
"type": "string",
"enum": remote_consoles.create_v28["properties"][
"remote_console"
]["properties"]["type"]["enum"],
"enum": remote_consoles.create_v28["properties"]["remote_console"][
"properties"
]["type"]["enum"],
"description": "The type of remote console. The valid values are novnc, rdp-html5, spice-html5, serial, and webmks. The type webmks is added since Microversion 2.8.",
},
"url": {
@ -835,12 +833,8 @@ KEYPAIR_CONTAINER_SCHEMA: dict[str, Any] = {
"properties": {"keypair": KEYPAIR_SCHEMA},
}

KEYPAIR_CREATED_SCHEMA: dict[str, Any] = copy.deepcopy(
KEYPAIR_CONTAINER_SCHEMA
)
KEYPAIR_CREATED_SCHEMA["properties"]["keypair"]["properties"][
"private_key"
] = {
KEYPAIR_CREATED_SCHEMA: dict[str, Any] = copy.deepcopy(KEYPAIR_CONTAINER_SCHEMA)
KEYPAIR_CREATED_SCHEMA["properties"]["keypair"]["properties"]["private_key"] = {
"type": "string",
"description": "If you do not provide a public key on create, a new keypair will be built for you, and the private key will be returned during the initial create call. Make sure to save this, as there is no way to get this private key again in the future.",
"x-openstack": {"max-ver": "2.91"},

@ -385,9 +385,7 @@ class OctaviaGenerator(OpenStackServerSourceBase):
self._process_route(route, openapi_spec, framework="pecan")

if args.api_ref_src:
merge_api_ref_doc(
openapi_spec, args.api_ref_src, allow_strip_version=False
)
merge_api_ref_doc(openapi_spec, args.api_ref_src, allow_strip_version=False)

self.dump_openapi(openapi_spec, Path(impl_path), args.validate)
@ -71,9 +71,7 @@ def merge_api_ref_doc(
|
||||
# TODO(gtema): notes are aside of main "p" and not
|
||||
# underneath
|
||||
# Iterate over URLs
|
||||
operation_url_containers = section.find_all(
|
||||
"div", class_="operation-grp"
|
||||
)
|
||||
operation_url_containers = section.find_all("div", class_="operation-grp")
|
||||
for op in operation_url_containers:
|
||||
ep = op.find("div", class_="endpoint-container")
|
||||
ep_divs = ep.find_all("div")
|
||||
@ -110,9 +108,7 @@ def merge_api_ref_doc(
|
||||
# Paths have different length. Skip
|
||||
continue
|
||||
is_search_aborted = False
|
||||
for source, doc in zip(
|
||||
existing_path_parts, doc_url_parts
|
||||
):
|
||||
for source, doc in zip(existing_path_parts, doc_url_parts):
|
||||
source_ = source.strip("{}")
|
||||
doc_ = doc.strip("{}")
|
||||
if (
|
||||
@ -157,14 +153,12 @@ def merge_api_ref_doc(
|
||||
op_spec = getattr(path_spec, method.lower(), None)
|
||||
if not op_spec:
|
||||
logging.warn(
|
||||
"Cannot find %s operation for %s in the spec"
|
||||
% (method, url)
|
||||
"Cannot find %s operation for %s in the spec" % (method, url)
|
||||
)
|
||||
continue
|
||||
|
||||
if (
|
||||
op_spec.operationId in processed_operations
|
||||
and not url.endswith("/action")
|
||||
if op_spec.operationId in processed_operations and not url.endswith(
|
||||
"/action"
|
||||
):
|
||||
# Do not update operation we have already processed
|
||||
continue
|
||||
@ -175,9 +169,7 @@ def merge_api_ref_doc(
|
||||
# details section
|
||||
details_button = op.find("button")
|
||||
details_section_id = details_button["data-target"].strip("#")
|
||||
details_section = section.find(
|
||||
"section", id=details_section_id
|
||||
)
|
||||
details_section = section.find("section", id=details_section_id)
|
||||
description = []
|
||||
action_name = None
|
||||
# Gather description section paragraphs to construct operation description
|
||||
@ -187,11 +179,9 @@ def merge_api_ref_doc(
|
||||
|
||||
elif details_child.name == "section":
|
||||
if (
|
||||
details_child.h3
|
||||
and "Request" in details_child.h3.strings
|
||||
details_child.h3 and "Request" in details_child.h3.strings
|
||||
) or (
|
||||
details_child.h4
|
||||
and "Request" in details_child.h4.strings
|
||||
details_child.h4 and "Request" in details_child.h4.strings
|
||||
):
|
||||
# Found request details
|
||||
if not details_child.table:
|
||||
@ -220,15 +210,13 @@ def merge_api_ref_doc(
|
||||
method,
|
||||
)
|
||||
continue
|
||||
(schema_specs, action_name) = (
|
||||
_get_schema_candidates(
|
||||
openapi_spec,
|
||||
url,
|
||||
spec_body,
|
||||
action_name,
|
||||
summary,
|
||||
description,
|
||||
)
|
||||
(schema_specs, action_name) = _get_schema_candidates(
|
||||
openapi_spec,
|
||||
url,
|
||||
spec_body,
|
||||
action_name,
|
||||
summary,
|
||||
description,
|
||||
)
|
||||
|
||||
_doc_process_operation_table(
|
||||
@ -247,15 +235,13 @@ def merge_api_ref_doc(
|
||||
details_child.h3
|
||||
and (
|
||||
"Response" in details_child.h3.strings
|
||||
or "Response Parameters"
|
||||
in details_child.h3.strings
|
||||
or "Response Parameters" in details_child.h3.strings
|
||||
)
|
||||
) or (
|
||||
details_child.h4
|
||||
and (
|
||||
"Response" in details_child.h4.strings
|
||||
or "Response Parameters"
|
||||
in details_child.h4.strings
|
||||
or "Response Parameters" in details_child.h4.strings
|
||||
)
|
||||
):
|
||||
# Found response details
|
||||
@ -289,10 +275,8 @@ def merge_api_ref_doc(
|
||||
op_spec.operationId,
|
||||
)
|
||||
continue
|
||||
(schema_specs, action_name) = (
|
||||
_get_schema_candidates(
|
||||
openapi_spec, url, spec_body, action_name
|
||||
)
|
||||
(schema_specs, action_name) = _get_schema_candidates(
|
||||
openapi_spec, url, spec_body, action_name
|
||||
)
|
||||
try:
|
||||
_doc_process_operation_table(
|
||||
@ -351,9 +335,7 @@ def _doc_process_operation_table(
|
||||
param_def.location == doc_param_location
|
||||
and param_def.name == doc_param_name
|
||||
):
|
||||
param_def.description = LiteralScalarString(
|
||||
doc_param_descr
|
||||
)
|
||||
param_def.description = LiteralScalarString(doc_param_descr)
|
||||
elif doc_param_location == "body":
|
||||
# Body param. Traverse through body information
|
||||
for schema in schema_specs:
|
||||
@ -380,9 +362,7 @@ def _find_schema_property(schema, target_prop_name):
|
||||
return
|
||||
for prop_name, prop_def in props.items():
|
||||
prop_type = (
|
||||
prop_def.get("type")
|
||||
if isinstance(prop_def, dict)
|
||||
else prop_def.type
|
||||
prop_def.get("type") if isinstance(prop_def, dict) else prop_def.type
|
||||
)
|
||||
if prop_name == target_prop_name:
|
||||
return prop_def
|
||||
@ -417,9 +397,7 @@ def _find_schema_property(schema, target_prop_name):
|
||||
|
||||
elif xtype == "array":
|
||||
items_schema = (
|
||||
schema.items
|
||||
if isinstance(schema, TypeSchema)
|
||||
else schema.get("items")
|
||||
schema.items if isinstance(schema, TypeSchema) else schema.get("items")
|
||||
)
|
||||
candidate = _find_schema_property(items_schema, target_prop_name)
|
||||
if candidate:
|
||||
@ -456,9 +434,7 @@ def _get_schema_candidates(
|
||||
ref = spec_body.get("$ref")
|
||||
oneOf = spec_body.get("oneOf")
|
||||
if spec_body and ref:
|
||||
candidate_schema = openapi_spec.components.schemas.get(
|
||||
ref.split("/")[-1]
|
||||
)
|
||||
candidate_schema = openapi_spec.components.schemas.get(ref.split("/")[-1])
|
||||
if candidate_schema.oneOf:
|
||||
for x in candidate_schema.oneOf:
|
||||
ref = x.get("$ref") if isinstance(x, dict) else x.ref
|
||||
@ -500,9 +476,7 @@ def _get_schema_candidates(
|
||||
|
||||
elif not action_name and section_description:
|
||||
if candidate_action_name and (
|
||||
re.search(
|
||||
rf"\b{candidate_action_name}\b", section_summary
|
||||
)
|
||||
re.search(rf"\b{candidate_action_name}\b", section_summary)
|
||||
or (
|
||||
url.endswith("/volumes/{volume_id}/action")
|
||||
# Cinder doc does not contain action name in the
|
||||
@ -522,9 +496,7 @@ def _get_schema_candidates(
|
||||
itms = res.get("oneOf")
|
||||
if itms:
|
||||
for itm in itms:
|
||||
schema_specs.append(
|
||||
get_schema(openapi_spec, itm)
|
||||
)
|
||||
schema_specs.append(get_schema(openapi_spec, itm))
|
||||
schema_specs.append(res)
|
||||
# Set the action name. Since
|
||||
# Request normally comes before
|
||||
|
@ -93,6 +93,4 @@ class OpenApiSchemaGenerator(BaseGenerator):
elif args.service_type == "shared-file-system":
self.generate_manila(target_dir, args)
else:
raise RuntimeError(
"Service type %s is not supported", args.service_type
)
raise RuntimeError("Service type %s is not supported", args.service_type)
@ -48,9 +48,7 @@ class OSCGenerator(BaseGenerator):
|
||||
context = dict(
|
||||
res=res.resource_class,
|
||||
sdk_mod_name=res.mod_name,
|
||||
osc_mod_name=res.mod_name.replace(
|
||||
"openstack.", "openstackclient."
|
||||
),
|
||||
osc_mod_name=res.mod_name.replace("openstack.", "openstackclient."),
|
||||
class_name=res.class_name,
|
||||
resource_name=res.class_name.lower(),
|
||||
sdk_service_name=res.service_name,
|
||||
@ -80,9 +78,7 @@ class OSCGenerator(BaseGenerator):
|
||||
context,
|
||||
osc_path,
|
||||
"osc/impl_list.py.j2",
|
||||
Path(
|
||||
work_dir, "openstackclient", "/".join(osc_path), "list.py"
|
||||
),
|
||||
Path(work_dir, "openstackclient", "/".join(osc_path), "list.py"),
|
||||
"osc/test_unit_list.py.j2",
|
||||
Path(
|
||||
work_dir,
|
||||
@ -100,9 +96,7 @@ class OSCGenerator(BaseGenerator):
|
||||
context,
|
||||
osc_path,
|
||||
"osc/impl_show.py.j2",
|
||||
Path(
|
||||
work_dir, "openstackclient", "/".join(osc_path), "show.py"
|
||||
),
|
||||
Path(work_dir, "openstackclient", "/".join(osc_path), "show.py"),
|
||||
"osc/test_unit_show.py.j2",
|
||||
Path(
|
||||
work_dir,
|
||||
|
@ -97,16 +97,12 @@ class VecString(common.BasePrimitiveType):
|
||||
class JsonValue(common_rust.JsonValue):
|
||||
"""Arbitrary JSON value"""
|
||||
|
||||
clap_macros: set[str] = set(
|
||||
['value_name="JSON"', "value_parser=parse_json"]
|
||||
)
|
||||
clap_macros: set[str] = set(['value_name="JSON"', "value_parser=parse_json"])
|
||||
original_data_type: BaseCombinedType | BaseCompoundType | None = None
|
||||
|
||||
@property
|
||||
def imports(self):
|
||||
imports: set[str] = set(
|
||||
["crate::common::parse_json", "serde_json::Value"]
|
||||
)
|
||||
imports: set[str] = set(["crate::common::parse_json", "serde_json::Value"])
|
||||
if self.original_data_type and isinstance(
|
||||
self.original_data_type, common_rust.Dictionary
|
||||
):
|
||||
@ -218,9 +214,7 @@ class EnumGroupStructInputField(StructInputField):
|
||||
class EnumGroupStruct(common_rust.Struct):
|
||||
"""Container for complex Enum containing Array"""
|
||||
|
||||
field_type_class_: Type[common_rust.StructField] = (
|
||||
EnumGroupStructInputField
|
||||
)
|
||||
field_type_class_: Type[common_rust.StructField] = EnumGroupStructInputField
|
||||
base_type: str = "struct"
|
||||
sdk_enum_name: str
|
||||
is_group: bool = True
|
||||
@ -258,23 +252,17 @@ class StructFieldResponse(common_rust.StructField):
|
||||
if self.local_name != self.remote_name:
|
||||
macros.add(f'title="{self.remote_name}"')
|
||||
# Fully Qualified Attribute Name
|
||||
fqan: str = ".".join(
|
||||
[service_name, resource_name, self.remote_name]
|
||||
).lower()
|
||||
fqan: str = ".".join([service_name, resource_name, self.remote_name]).lower()
|
||||
# Check the known alias of the field by FQAN
|
||||
alias = common.FQAN_ALIAS_MAP.get(fqan)
|
||||
if operation_type in ["list", "list_from_struct"]:
|
||||
if (
|
||||
"id" in struct.fields.keys()
|
||||
and not (
|
||||
self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS
|
||||
)
|
||||
and not (self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS)
|
||||
) or (
|
||||
"id" not in struct.fields.keys()
|
||||
and (self.local_name not in list(struct.fields.keys())[-10:])
|
||||
and not (
|
||||
self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS
|
||||
)
|
||||
and not (self.local_name in BASIC_FIELDS or alias in BASIC_FIELDS)
|
||||
):
|
||||
# Only add "wide" flag if field is not in the basic fields AND
|
||||
# there is at least "id" field existing in the struct OR the
|
||||
@ -432,9 +420,7 @@ class RequestParameter(common_rust.RequestParameter):
|
||||
|
||||
|
||||
class RequestTypeManager(common_rust.TypeManager):
|
||||
primitive_type_mapping: dict[
|
||||
Type[model.PrimitiveType], Type[BasePrimitiveType]
|
||||
] = {
|
||||
primitive_type_mapping: dict[Type[model.PrimitiveType], Type[BasePrimitiveType]] = {
|
||||
model.PrimitiveString: String,
|
||||
model.ConstraintString: String,
|
||||
model.PrimitiveAny: JsonValue,
|
||||
@ -452,17 +438,13 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
model.Set: ArrayInput,
|
||||
}
|
||||
|
||||
request_parameter_class: Type[common_rust.RequestParameter] = (
|
||||
RequestParameter
|
||||
)
|
||||
request_parameter_class: Type[common_rust.RequestParameter] = RequestParameter
|
||||
string_enum_class = StringEnum
|
||||
|
||||
def get_local_attribute_name(self, name: str) -> str:
|
||||
"""Get localized attribute name"""
|
||||
name = name.replace(".", "_")
|
||||
attr_name = "_".join(
|
||||
x.lower() for x in re.split(common.SPLIT_NAME_RE, name)
|
||||
)
|
||||
attr_name = "_".join(x.lower() for x in re.split(common.SPLIT_NAME_RE, name))
|
||||
if attr_name in ["type", "self", "enum", "ref", "default"]:
|
||||
attr_name = f"_{attr_name}"
|
||||
return attr_name
|
||||
@ -487,9 +469,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
|
||||
# Field is of Enum type.
|
||||
if isinstance(result, common_rust.Enum):
|
||||
variant_classes = [
|
||||
x.data_type.__class__ for x in result.kinds.values()
|
||||
]
|
||||
variant_classes = [x.data_type.__class__ for x in result.kinds.values()]
|
||||
|
||||
if (
|
||||
StringEnum in variant_classes
|
||||
@ -547,9 +527,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType:
|
||||
"""Get local destination type from the ModelType"""
|
||||
model_ref: model.Reference | None = None
|
||||
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = (
|
||||
None
|
||||
)
|
||||
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = None
|
||||
|
||||
if isinstance(type_model, model.Reference):
|
||||
model_ref = type_model
|
||||
@ -579,10 +557,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
else:
|
||||
item_type = type_model.item_type
|
||||
|
||||
if (
isinstance(item_type, model.Struct)
and len(item_type.fields.keys()) > 1
):
if isinstance(item_type, model.Struct) and len(item_type.fields.keys()) > 1:
# An array of structs with more than 1 field
# Array of Structs can not be handled by the CLI (input).
# Therefore handle underlying structure as Json saving
@ -617,9 +592,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
):
|
||||
original_data_type = self.convert_model(type_model.value_type)
|
||||
typ = JsonValue(
|
||||
original_data_type=DictionaryInput(
|
||||
value_type=original_data_type
|
||||
)
|
||||
original_data_type=DictionaryInput(value_type=original_data_type)
|
||||
)
|
||||
|
||||
if typ:
|
||||
@ -635,9 +608,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
struct_class = self.data_type_mapping[model.Struct]
|
||||
mod = struct_class(
|
||||
name=self.get_model_name(type_model.reference),
|
||||
description=common_rust.sanitize_rust_docstrings(
|
||||
type_model.description
|
||||
),
|
||||
description=common_rust.sanitize_rust_docstrings(type_model.description),
|
||||
)
|
||||
field_class = mod.field_type_class_
|
||||
for field_name, field in type_model.fields.items():
|
||||
@ -666,9 +637,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
)
|
||||
and not (
|
||||
# and not Option<Primitive>
|
||||
isinstance(
|
||||
field_data_type.value_type, self.option_type_class
|
||||
)
|
||||
isinstance(field_data_type.value_type, self.option_type_class)
|
||||
and isinstance(
|
||||
field_data_type.value_type.item_type,
|
||||
common_rust.BasePrimitiveType,
|
||||
@ -677,13 +646,9 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
):
|
||||
dict_type_model = self._get_adt_by_reference(field.data_type)
|
||||
simplified_data_type = JsonValue()
|
||||
simplified_data_type.original_data_type = (
|
||||
field_data_type.value_type
|
||||
)
|
||||
simplified_data_type.original_data_type = field_data_type.value_type
|
||||
field_data_type.value_type = simplified_data_type
|
||||
self.ignored_models.append(
|
||||
dict_type_model.value_type.reference
|
||||
)
|
||||
self.ignored_models.append(dict_type_model.value_type.reference)
|
||||
elif isinstance(field_data_type, StructInput):
|
||||
# Check if one of the sub fields has same attribute name as in the current struct.
|
||||
# Ideally this should not ever happen, but i.e. image.namespace.property has the case
|
||||
@ -701,16 +666,12 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
f = field_class(
|
||||
local_name=self.get_local_attribute_name(field_name),
|
||||
remote_name=self.get_remote_attribute_name(field_name),
|
||||
description=common_rust.sanitize_rust_docstrings(
|
||||
field.description
|
||||
),
|
||||
description=common_rust.sanitize_rust_docstrings(field.description),
|
||||
data_type=field_data_type,
|
||||
is_optional=not field.is_required,
|
||||
is_nullable=is_nullable,
|
||||
)
|
||||
if mod.name != "Request" and isinstance(
|
||||
field_data_type, struct_class
|
||||
):
|
||||
if mod.name != "Request" and isinstance(field_data_type, struct_class):
|
||||
field_data_type.is_group = True
|
||||
field_data_type.is_required = field.is_required
|
||||
if isinstance(field_data_type, self.option_type_class):
|
||||
@ -782,9 +743,7 @@ class RequestTypeManager(common_rust.TypeManager):
|
||||
|
||||
|
||||
class ResponseTypeManager(common_rust.TypeManager):
|
||||
primitive_type_mapping: dict[
|
||||
Type[model.PrimitiveType], Type[BasePrimitiveType]
|
||||
] = {
|
||||
primitive_type_mapping: dict[Type[model.PrimitiveType], Type[BasePrimitiveType]] = {
|
||||
model.PrimitiveString: common_rust.String,
|
||||
model.ConstraintString: common_rust.String,
|
||||
}
|
||||
@ -805,8 +764,7 @@ class ResponseTypeManager(common_rust.TypeManager):
|
||||
if not model_ref:
|
||||
return "Response"
|
||||
return "Response" + "".join(
|
||||
x.capitalize()
|
||||
for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
|
||||
x.capitalize() for x in re.split(common.SPLIT_NAME_RE, model_ref.name)
|
||||
)
|
||||
|
||||
def convert_model(
|
||||
@ -815,9 +773,7 @@ class ResponseTypeManager(common_rust.TypeManager):
|
||||
) -> BasePrimitiveType | BaseCombinedType | BaseCompoundType:
|
||||
"""Get local destination type from the ModelType"""
|
||||
model_ref: model.Reference | None = None
|
||||
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = (
|
||||
None
|
||||
)
|
||||
typ: BasePrimitiveType | BaseCombinedType | BaseCompoundType | None = None
|
||||
if isinstance(type_model, model.Reference):
|
||||
model_ref = type_model
|
||||
type_model = self._get_adt_by_reference(model_ref)
|
||||
@ -858,9 +814,7 @@ class ResponseTypeManager(common_rust.TypeManager):
|
||||
# There is no sense of Enum in the output. Convert to the plain
|
||||
# string
|
||||
typ = String(
|
||||
description=common_rust.sanitize_rust_docstrings(
|
||||
typ.description
|
||||
)
|
||||
description=common_rust.sanitize_rust_docstrings(typ.description)
|
||||
)
|
||||
if (
|
||||
typ
|
||||
@ -875,23 +829,18 @@ class ResponseTypeManager(common_rust.TypeManager):
|
||||
def _simplify_oneof_combinations(self, type_model, kinds):
|
||||
"""Simplify certain known oneOf combinations"""
|
||||
kinds_classes = [x["class"] for x in kinds]
|
||||
if (
|
||||
common_rust.String in kinds_classes
|
||||
and common_rust.Number in kinds_classes
|
||||
):
|
||||
if common_rust.String in kinds_classes and common_rust.Number in kinds_classes:
|
||||
# oneOf [string, number] => NumString
|
||||
kinds.clear()
|
||||
kinds.append({"local": NumString(), "class": NumString})
|
||||
elif (
|
||||
common_rust.String in kinds_classes
|
||||
and common_rust.Integer in kinds_classes
|
||||
common_rust.String in kinds_classes and common_rust.Integer in kinds_classes
|
||||
):
|
||||
# oneOf [string, integer] => NumString
|
||||
kinds.clear()
|
||||
kinds.append({"local": IntString(), "class": IntString})
|
||||
elif (
|
||||
common_rust.String in kinds_classes
|
||||
and common_rust.Boolean in kinds_classes
|
||||
common_rust.String in kinds_classes and common_rust.Boolean in kinds_classes
|
||||
):
|
||||
# oneOf [string, boolean] => String
|
||||
kinds.clear()
|
||||
@ -903,9 +852,7 @@ class ResponseTypeManager(common_rust.TypeManager):
|
||||
struct_class = self.data_type_mapping[model.Struct]
|
||||
mod = struct_class(
|
||||
name=self.get_model_name(type_model.reference),
|
||||
description=common_rust.sanitize_rust_docstrings(
|
||||
type_model.description
|
||||
),
|
||||
description=common_rust.sanitize_rust_docstrings(type_model.description),
|
||||
)
|
||||
field_class = mod.field_type_class_
|
||||
for field_name, field in type_model.fields.items():
|
||||
@ -925,9 +872,7 @@ class ResponseTypeManager(common_rust.TypeManager):
|
||||
f = field_class(
|
||||
local_name=self.get_local_attribute_name(field_name),
|
||||
remote_name=self.get_remote_attribute_name(field_name),
|
||||
description=common_rust.sanitize_rust_docstrings(
|
||||
field.description
|
||||
),
|
||||
description=common_rust.sanitize_rust_docstrings(field.description),
|
||||
data_type=field_data_type,
|
||||
is_optional=not field.is_required,
|
||||
is_nullable=is_nullable,
|
||||
@ -1038,8 +983,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
):
|
||||
"""Generate code for the Rust openstack_cli"""
|
||||
logging.debug(
|
||||
"Generating Rust CLI code for `%s` in %s"
|
||||
% (operation_id, target_dir)
|
||||
"Generating Rust CLI code for `%s` in %s" % (operation_id, target_dir)
|
||||
)
|
||||
work_dir = Path(target_dir, "rust", "openstack_cli", "src")
|
||||
|
||||
@ -1048,9 +992,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
if not operation_id:
|
||||
operation_id = args.openapi_operation_id
|
||||
|
||||
(path, method, spec) = common.find_openapi_operation(
|
||||
openapi_spec, operation_id
|
||||
)
|
||||
(path, method, spec) = common.find_openapi_operation(openapi_spec, operation_id)
|
||||
_, res_name = res.split(".") if res else (None, None)
|
||||
resource_name = common.get_resource_names_from_url(path)[-1]
|
||||
|
||||
@ -1072,12 +1014,12 @@ class RustCliGenerator(BaseGenerator):
|
||||
global_additional_imports: set[str] = set()
|
||||
|
||||
# Collect all operation parameters
|
||||
for param in openapi_spec["paths"][path].get(
|
||||
for param in openapi_spec["paths"][path].get("parameters", []) + spec.get(
|
||||
"parameters", []
|
||||
) + spec.get("parameters", []):
|
||||
if (
|
||||
("{" + param["name"] + "}") in path and param["in"] == "path"
|
||||
) or param["in"] != "path":
|
||||
):
|
||||
if (("{" + param["name"] + "}") in path and param["in"] == "path") or param[
|
||||
"in"
|
||||
] != "path":
|
||||
# Respect path params that appear in path and not path params
|
||||
param_ = openapi_parser.parse_parameter(param)
|
||||
if param_.name in [
|
||||
@ -1090,27 +1032,18 @@ class RustCliGenerator(BaseGenerator):
|
||||
if param_.resource_link:
|
||||
link_res_name: str = param_.resource_link.split(".")[0]
|
||||
global_additional_imports.add("tracing::warn")
|
||||
global_additional_imports.add(
|
||||
"openstack_sdk::api::find_by_name"
|
||||
)
|
||||
global_additional_imports.add(
|
||||
"openstack_sdk::api::QueryAsync"
|
||||
)
|
||||
global_additional_imports.add("openstack_sdk::api::find_by_name")
|
||||
global_additional_imports.add("openstack_sdk::api::QueryAsync")
|
||||
global_additional_imports.add(
|
||||
f"openstack_sdk::api::{'::'.join(link_res_name.split('/'))}::find as find_{link_res_name.split('/')[-1]}"
|
||||
)
|
||||
|
||||
# List of operation variants (based on the body)
|
||||
operation_variants = common.get_operation_variants(
|
||||
spec, args.operation_name
|
||||
)
|
||||
operation_variants = common.get_operation_variants(spec, args.operation_name)
|
||||
|
||||
body_types: list[str] = []
|
||||
last_path_parameter: RequestParameter | None = None
|
||||
if (
|
||||
args.operation_type == "download"
|
||||
and path == "/v2/images/{image_id}/file"
|
||||
):
|
||||
if args.operation_type == "download" and path == "/v2/images/{image_id}/file":
|
||||
is_image_download = True
|
||||
|
||||
if args.operation_type == "upload":
|
||||
@ -1123,9 +1056,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
logging.debug("Processing variant %s" % operation_variant)
|
||||
additional_imports = set(global_additional_imports)
|
||||
type_manager: common_rust.TypeManager = RequestTypeManager()
|
||||
response_type_manager: common_rust.TypeManager = (
|
||||
ResponseTypeManager()
|
||||
)
|
||||
response_type_manager: common_rust.TypeManager = ResponseTypeManager()
|
||||
result_is_list: bool = False
|
||||
is_list_paginated: bool = False
|
||||
if operation_params:
|
||||
@ -1195,20 +1126,14 @@ class RustCliGenerator(BaseGenerator):
|
||||
response = common.find_response_schema(
|
||||
spec["responses"],
|
||||
args.response_key or resource_name,
|
||||
(
|
||||
args.operation_name
|
||||
if args.operation_type == "action"
|
||||
else None
|
||||
),
|
||||
(args.operation_name if args.operation_type == "action" else None),
|
||||
)
|
||||
|
||||
if response:
|
||||
response_key: str
|
||||
if args.response_key:
|
||||
response_key = (
|
||||
args.response_key
|
||||
if args.response_key != "null"
|
||||
else None
|
||||
args.response_key if args.response_key != "null" else None
|
||||
)
|
||||
else:
|
||||
response_key = resource_name
|
||||
@ -1223,9 +1148,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
isinstance(response_def.get("type"), list)
|
||||
and "object" in response_def["type"]
|
||||
):
|
||||
(root, response_types) = openapi_parser.parse(
|
||||
response_def
|
||||
)
|
||||
(root, response_types) = openapi_parser.parse(response_def)
|
||||
if isinstance(root, model.Dictionary):
|
||||
value_type: (
|
||||
common_rust.BasePrimitiveType
|
||||
@ -1234,10 +1157,8 @@ class RustCliGenerator(BaseGenerator):
|
||||
| None
|
||||
) = None
|
||||
try:
|
||||
value_type = (
|
||||
response_type_manager.convert_model(
|
||||
root.value_type
|
||||
)
|
||||
value_type = response_type_manager.convert_model(
|
||||
root.value_type
|
||||
)
|
||||
except Exception:
|
||||
# In rare cases we can not conter
|
||||
@ -1248,19 +1169,13 @@ class RustCliGenerator(BaseGenerator):
|
||||
value_type = JsonValue()
|
||||
# if not isinstance(value_type, common_rust.BasePrimitiveType):
|
||||
# value_type = JsonValue(original_data_type=value_type)
|
||||
root_dict = HashMapResponse(
|
||||
value_type=value_type
|
||||
)
|
||||
root_dict = HashMapResponse(value_type=value_type)
|
||||
response_type_manager.refs[
|
||||
model.Reference(
|
||||
name="Body", type=HashMapResponse
|
||||
)
|
||||
model.Reference(name="Body", type=HashMapResponse)
|
||||
] = root_dict
|
||||
|
||||
else:
|
||||
response_type_manager.set_models(
|
||||
response_types
|
||||
)
|
||||
response_type_manager.set_models(response_types)
|
||||
|
||||
if method == "patch" and not request_types:
|
||||
# image patch is a jsonpatch based operation
|
||||
@ -1284,15 +1199,11 @@ class RustCliGenerator(BaseGenerator):
|
||||
elif response_def["type"] == "string":
|
||||
(root_dt, _) = openapi_parser.parse(response_def)
|
||||
if not root_dt:
|
||||
raise RuntimeError(
|
||||
"Response data can not be processed"
|
||||
)
|
||||
raise RuntimeError("Response data can not be processed")
|
||||
field = common_rust.StructField(
|
||||
local_name="dummy",
|
||||
remote_name="dummy",
|
||||
data_type=response_type_manager.convert_model(
|
||||
root_dt
|
||||
),
|
||||
data_type=response_type_manager.convert_model(root_dt),
|
||||
is_optional=False,
|
||||
)
|
||||
tuple_struct = TupleStruct(name="Response")
|
||||
@ -1301,8 +1212,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
model.Reference(name="Body", type=TupleStruct)
|
||||
] = tuple_struct
|
||||
elif (
|
||||
response_def["type"] == "array"
|
||||
and "items" in response_def
|
||||
response_def["type"] == "array" and "items" in response_def
|
||||
):
|
||||
(_, response_types) = openapi_parser.parse(
|
||||
response_def["items"]
|
||||
@ -1312,9 +1222,9 @@ class RustCliGenerator(BaseGenerator):
|
||||
response_props = response.get("properties", {})
|
||||
if (
|
||||
response_props
|
||||
and response_props[
|
||||
list(response_props.keys())[0]
|
||||
].get("type")
|
||||
and response_props[list(response_props.keys())[0]].get(
|
||||
"type"
|
||||
)
|
||||
== "array"
|
||||
):
|
||||
result_is_list = True
|
||||
@ -1356,9 +1266,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
|
||||
if args.operation_type == "list":
|
||||
# Make plural form for listing
|
||||
target_class_name = common.get_plural_form(
|
||||
target_class_name
|
||||
)
|
||||
target_class_name = common.get_plural_form(target_class_name)
|
||||
if "limit" in [
|
||||
k for (k, _) in type_manager.get_parameters("query")
|
||||
]:
|
||||
@ -1370,18 +1278,10 @@ class RustCliGenerator(BaseGenerator):
|
||||
additional_imports.add("crate::common::download_file")
|
||||
|
||||
if args.operation_type == "upload":
|
||||
additional_imports.add(
|
||||
"crate::common::build_upload_asyncread"
|
||||
)
|
||||
additional_imports.add("crate::common::build_upload_asyncread")
|
||||
if (
|
||||
(
|
||||
isinstance(root_type, StructResponse)
|
||||
and root_type.fields
|
||||
)
|
||||
or (
|
||||
isinstance(root_type, TupleStruct)
|
||||
and root_type.tuple_fields
|
||||
)
|
||||
(isinstance(root_type, StructResponse) and root_type.fields)
|
||||
or (isinstance(root_type, TupleStruct) and root_type.tuple_fields)
|
||||
or (isinstance(root_type, common_rust.Dictionary))
|
||||
):
|
||||
additional_imports.add("openstack_sdk::api::QueryAsync")
|
||||
@ -1394,18 +1294,10 @@ class RustCliGenerator(BaseGenerator):
|
||||
additional_imports.add("structable_derive::StructTable")
|
||||
|
||||
if resource_header_metadata:
|
||||
additional_imports.add(
|
||||
"crate::common::HashMapStringString"
|
||||
)
|
||||
additional_imports.add("crate::common::HashMapStringString")
|
||||
additional_imports.add("std::collections::HashMap")
|
||||
if (
|
||||
len(
|
||||
[
|
||||
x
|
||||
for x in resource_header_metadata.keys()
|
||||
if "*" in x
|
||||
]
|
||||
)
|
||||
len([x for x in resource_header_metadata.keys() if "*" in x])
|
||||
> 0
|
||||
):
|
||||
additional_imports.add("regex::Regex")
|
||||
@ -1445,16 +1337,12 @@ class RustCliGenerator(BaseGenerator):
|
||||
command_description = operation_body.get(
|
||||
"description", command_description
|
||||
)
|
||||
command_summary = operation_body.get(
|
||||
"summary", command_summary
|
||||
)
|
||||
command_summary = operation_body.get("summary", command_summary)
|
||||
|
||||
if command_summary and microversion:
|
||||
command_summary += f" (microversion = {microversion})"
|
||||
if not command_description:
|
||||
command_description = (
|
||||
"Command without description in OpenAPI"
|
||||
)
|
||||
command_description = "Command without description in OpenAPI"
|
||||
context = dict(
|
||||
operation_id=operation_id,
|
||||
operation_type=args.operation_type,
|
||||
@ -1496,9 +1384,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
|
||||
if not args.cli_mod_path:
|
||||
# mod_name = args.operation_name or args.operation_type.value
|
||||
impl_path = Path(
|
||||
work_dir, "/".join(cli_mod_path), f"{mod_name}.rs"
|
||||
)
|
||||
impl_path = Path(work_dir, "/".join(cli_mod_path), f"{mod_name}.rs")
|
||||
|
||||
self._render_command(
|
||||
context,
|
||||
@ -1516,9 +1402,7 @@ class RustCliGenerator(BaseGenerator):
|
||||
)
|
||||
cmd = args.cli_full_command
|
||||
if microversion:
|
||||
cmd = args.cli_full_command + microversion.replace(
|
||||
".", ""
|
||||
)
|
||||
cmd = args.cli_full_command + microversion.replace(".", "")
|
||||
|
||||
test_context = {
|
||||
"service_type": args.service_type,
|
||||
|
@ -58,11 +58,7 @@ class Enum(common_rust.Enum):
|
||||
+ first_kind_name
|
||||
+ "("
|
||||
+ first_kind_val.data_type.get_sample()
|
||||
+ (
|
||||
".into()"
|
||||
if isinstance(first_kind_val.data_type, String)
|
||||
else ""
|
||||
)
|
||||
+ (".into()" if isinstance(first_kind_val.data_type, String) else "")
|
||||
+ ")"
|
||||
)
|
||||
return res
|
||||
@ -186,9 +182,7 @@ class BTreeMap(common_rust.Dictionary):
|
||||
f".map(|(k, v)| (k, v.into_iter()))"
|
||||
)
|
||||
else:
|
||||
type_hint = self.value_type.type_hint.replace(
|
||||
"Cow<'a, str>", "String"
|
||||
)
|
||||
type_hint = self.value_type.type_hint.replace("Cow<'a, str>", "String")
|
||||
return f"BTreeMap::<String, {type_hint}>::new().into_iter()"
|
||||
|
||||
def get_mandatory_init(self):
|
||||
@ -247,9 +241,7 @@ class TypeManager(common_rust.TypeManager):
|
||||
model.CommaSeparatedList: CommaSeparatedList,
|
||||
}
|
||||
|
||||
request_parameter_class: Type[common_rust.RequestParameter] = (
|
||||
RequestParameter
|
||||
)
|
||||
request_parameter_class: Type[common_rust.RequestParameter] = RequestParameter
|
||||
|
||||
def set_parameters(self, parameters: list[model.RequestParameter]) -> None:
|
||||
"""Set OpenAPI operation parameters into typemanager for conversion"""
|
||||
@ -316,9 +308,7 @@ class RustSdkGenerator(BaseGenerator):
|
||||
openapi_spec = common.get_openapi_spec(args.openapi_yaml_spec)
|
||||
if not operation_id:
|
||||
operation_id = args.openapi_operation_id
|
||||
(path, method, spec) = common.find_openapi_operation(
|
||||
openapi_spec, operation_id
|
||||
)
|
||||
(path, method, spec) = common.find_openapi_operation(openapi_spec, operation_id)
|
||||
if args.operation_type == "find":
|
||||
yield self.generate_find_mod(
|
||||
target_dir,
|
||||
@ -344,12 +334,12 @@ class RustSdkGenerator(BaseGenerator):
|
||||
type_manager: TypeManager | None = None
|
||||
is_json_patch: bool = False
|
||||
# Collect all operation parameters
|
||||
for param in openapi_spec["paths"][path].get(
|
||||
for param in openapi_spec["paths"][path].get("parameters", []) + spec.get(
|
||||
"parameters", []
|
||||
) + spec.get("parameters", []):
|
||||
if (
|
||||
("{" + param["name"] + "}") in path and param["in"] == "path"
|
||||
) or param["in"] != "path":
|
||||
):
|
||||
if (("{" + param["name"] + "}") in path and param["in"] == "path") or param[
|
||||
"in"
|
||||
] != "path":
|
||||
# Respect path params that appear in path and not path params
|
||||
param_ = openapi_parser.parse_parameter(param)
|
||||
if param_.name in [
|
||||
@ -363,9 +353,7 @@ class RustSdkGenerator(BaseGenerator):
|
||||
|
||||
# Process body information
|
||||
# List of operation variants (based on the body)
|
||||
operation_variants = common.get_operation_variants(
|
||||
spec, args.operation_name
|
||||
)
|
||||
operation_variants = common.get_operation_variants(spec, args.operation_name)
|
||||
|
||||
api_ver_matches: re.Match | None = None
|
||||
path_elements = path.lstrip("/").split("/")
|
||||
@ -575,9 +563,9 @@ class RustSdkGenerator(BaseGenerator):
|
||||
operation_path_params: list[model.RequestParameter] = []
|
||||
operation_query_params: list[model.RequestParameter] = []
|
||||
|
||||
for param in openapi_spec["paths"][path].get(
|
||||
for param in openapi_spec["paths"][path].get("parameters", []) + spec.get(
|
||||
"parameters", []
|
||||
) + spec.get("parameters", []):
|
||||
):
|
||||
if ("{" + param["name"] + "}") in path and param["in"] == "path":
|
||||
# Respect path params that appear in path and not in path params
|
||||
param_ = openapi_parser.parse_parameter(param)
|
||||
@ -603,9 +591,7 @@ class RustSdkGenerator(BaseGenerator):
|
||||
name_field=name_field,
|
||||
type_manager=type_manager,
|
||||
list_lifetime=(
|
||||
"<'a>"
|
||||
if operation_query_params or operation_path_params
|
||||
else ""
|
||||
"<'a>" if operation_query_params or operation_path_params else ""
|
||||
),
|
||||
)
|
||||
|
||||
|
@ -31,7 +31,5 @@ class TestGenerator(TestCase):
generator.generate(work_dir.name, Args(validate=True))

self.assertTrue(
Path(
work_dir.name, "openapi_specs", "block-storage", "v3.yaml"
).exists()
Path(work_dir.name, "openapi_specs", "block-storage", "v3.yaml").exists()
)

@ -31,7 +31,5 @@ class TestGenerator(TestCase):
generator.generate(work_dir.name, Args(validate=True))

self.assertTrue(
Path(
work_dir.name, "openapi_specs", "identity", "v3.yaml"
).exists()
Path(work_dir.name, "openapi_specs", "identity", "v3.yaml").exists()
)

@ -31,7 +31,5 @@ class TestGenerator(TestCase):
generator.generate(work_dir.name, Args(validate=True))

self.assertTrue(
Path(
work_dir.name, "openapi_specs", "load-balancing", "v2.yaml"
).exists()
Path(work_dir.name, "openapi_specs", "load-balancing", "v2.yaml").exists()
)
@ -81,9 +81,7 @@ class TestFindResponseSchema(TestCase):
|
||||
},
|
||||
"204": {
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {"oneOf": [foo_action, bar_action]}
|
||||
}
|
||||
"application/json": {"schema": {"oneOf": [foo_action, bar_action]}}
|
||||
}
|
||||
},
|
||||
}
|
||||
|
@ -410,9 +410,7 @@ EXPECTED_TLA_DATA = model.Struct(
|
||||
min_ver="2.94",
|
||||
),
|
||||
"os:scheduler_hints": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="os:scheduler_hints", type=model.Struct
|
||||
),
|
||||
data_type=model.Reference(name="os:scheduler_hints", type=model.Struct),
|
||||
description="scheduler hints description",
|
||||
min_ver="2.94",
|
||||
),
|
||||
@ -468,9 +466,7 @@ EXPECTED_DATA_TYPES = [
|
||||
reference=model.Reference(name="networks", type=model.Struct),
|
||||
fields={
|
||||
"fixed_ip": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="fixed_ip", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="fixed_ip", type=model.OneOfType),
|
||||
),
|
||||
"port": model.StructField(
|
||||
data_type=model.Reference(name="port", type=model.OneOfType),
|
||||
@ -523,17 +519,13 @@ EXPECTED_DATA_TYPES = [
|
||||
# ],
|
||||
# ),
|
||||
model.Enum(
|
||||
reference=model.Reference(
|
||||
name="delete_on_termination", type=model.Enum
|
||||
),
|
||||
reference=model.Reference(name="delete_on_termination", type=model.Enum),
|
||||
literals=[True, "True", False, "False"],
|
||||
base_types=[model.ConstraintString, model.PrimitiveBoolean],
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Struct(
|
||||
reference=model.Reference(
|
||||
name="block_device_mapping", type=model.Struct
|
||||
),
|
||||
reference=model.Reference(name="block_device_mapping", type=model.Struct),
|
||||
fields={
|
||||
"virtual_name": model.StructField(
|
||||
data_type=model.ConstraintString(maxLength=255),
|
||||
@ -545,9 +537,7 @@ EXPECTED_DATA_TYPES = [
|
||||
data_type=model.ConstraintString(format="uuid"),
|
||||
),
|
||||
"volume_size": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="volume_size", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="volume_size", type=model.OneOfType),
|
||||
),
|
||||
"device_name": model.StructField(
|
||||
data_type=model.ConstraintString(
|
||||
@ -596,9 +586,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Struct(
|
||||
reference=model.Reference(
|
||||
name="block_device_mapping_v2", type=model.Struct
|
||||
),
|
||||
reference=model.Reference(name="block_device_mapping_v2", type=model.Struct),
|
||||
fields={
|
||||
"virtual_name": model.StructField(
|
||||
data_type=model.ConstraintString(maxLength=255),
|
||||
@ -614,9 +602,7 @@ EXPECTED_DATA_TYPES = [
|
||||
),
|
||||
),
|
||||
"volume_size": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="volume_size", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="volume_size", type=model.OneOfType),
|
||||
),
|
||||
"device_name": model.StructField(
|
||||
data_type=model.ConstraintString(
|
||||
@ -654,9 +640,7 @@ EXPECTED_DATA_TYPES = [
|
||||
),
|
||||
),
|
||||
"destination_type": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="destination_type", type=model.Enum
|
||||
)
|
||||
data_type=model.Reference(name="destination_type", type=model.Enum)
|
||||
),
|
||||
"guest_format": model.StructField(
|
||||
data_type=model.ConstraintString(
|
||||
@ -674,9 +658,7 @@ EXPECTED_DATA_TYPES = [
|
||||
),
|
||||
),
|
||||
"boot_index": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="boot_index", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="boot_index", type=model.OneOfType),
|
||||
),
|
||||
"tag": model.StructField(
|
||||
data_type=model.ConstraintString(
|
||||
@ -686,29 +668,19 @@ EXPECTED_DATA_TYPES = [
|
||||
),
|
||||
),
|
||||
"volume_type": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="volume_type", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="volume_type", type=model.OneOfType),
|
||||
),
|
||||
},
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Array(
|
||||
reference=model.Reference(
|
||||
name="block_device_mapping", type=model.Array
|
||||
),
|
||||
item_type=model.Reference(
|
||||
name="block_device_mapping", type=model.Struct
|
||||
),
|
||||
reference=model.Reference(name="block_device_mapping", type=model.Array),
|
||||
item_type=model.Reference(name="block_device_mapping", type=model.Struct),
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Array(
|
||||
reference=model.Reference(
|
||||
name="block_device_mapping_v2", type=model.Array
|
||||
),
|
||||
item_type=model.Reference(
|
||||
name="block_device_mapping_v2", type=model.Struct
|
||||
),
|
||||
reference=model.Reference(name="block_device_mapping_v2", type=model.Array),
|
||||
item_type=model.Reference(name="block_device_mapping_v2", type=model.Struct),
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Enum(
|
||||
@ -770,9 +742,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Array(
|
||||
reference=model.Reference(
|
||||
name="trusted_image_certificates", type=model.Array
|
||||
),
|
||||
reference=model.Reference(name="trusted_image_certificates", type=model.Array),
|
||||
item_type=model.ConstraintString(format=None, minLength=1),
|
||||
min_ver="2.94",
|
||||
),
|
||||
@ -781,9 +751,7 @@ EXPECTED_DATA_TYPES = [
|
||||
name="trusted_image_certificates", type=model.OneOfType
|
||||
),
|
||||
kinds=[
|
||||
model.Reference(
|
||||
name="trusted_image_certificates", type=model.Array
|
||||
),
|
||||
model.Reference(name="trusted_image_certificates", type=model.Array),
|
||||
model.PrimitiveNull(),
|
||||
],
|
||||
min_ver="2.94",
|
||||
@ -801,15 +769,11 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
"imageRef": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="imageRef", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="imageRef", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"flavorRef": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="flavorRef", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="flavorRef", type=model.OneOfType),
|
||||
is_required=True,
|
||||
min_ver="2.94",
|
||||
),
|
||||
@ -817,24 +781,18 @@ EXPECTED_DATA_TYPES = [
|
||||
data_type=model.ConstraintString(format=None), min_ver="2.94"
|
||||
),
|
||||
"metadata": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="metadata", type=model.Dictionary
|
||||
),
|
||||
data_type=model.Reference(name="metadata", type=model.Dictionary),
|
||||
description="metadata description",
|
||||
min_ver="2.94",
|
||||
),
|
||||
"networks": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="networks", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="networks", type=model.OneOfType),
|
||||
description="Networks description",
|
||||
is_required=True,
|
||||
min_ver="2.94",
|
||||
),
|
||||
"OS-DCF:diskConfig": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="OS-DCF:diskConfig", type=model.Enum
|
||||
),
|
||||
data_type=model.Reference(name="OS-DCF:diskConfig", type=model.Enum),
|
||||
description="DiskConfig description",
|
||||
min_ver="2.94",
|
||||
),
|
||||
@ -864,21 +822,15 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
"config_drive": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="config_drive", type=model.Enum
|
||||
),
|
||||
data_type=model.Reference(name="config_drive", type=model.Enum),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"min_count": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="min_count", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="min_count", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"security_groups": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="security_groups", type=model.Array
|
||||
),
|
||||
data_type=model.Reference(name="security_groups", type=model.Array),
|
||||
description="SG descr",
|
||||
min_ver="2.94",
|
||||
),
|
||||
@ -891,9 +843,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
"description": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="description", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="description", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"tags": model.StructField(
|
||||
@ -986,9 +936,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.OneOfType(
|
||||
reference=model.Reference(
|
||||
name="build_near_host_ip", type=model.OneOfType
|
||||
),
|
||||
reference=model.Reference(name="build_near_host_ip", type=model.OneOfType),
|
||||
kinds=[
|
||||
model.ConstraintString(format="ipv4"),
|
||||
model.ConstraintString(format="ipv6"),
|
||||
@ -996,24 +944,18 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Struct(
|
||||
reference=model.Reference(
|
||||
name="os:scheduler_hints", type=model.Struct
|
||||
),
|
||||
reference=model.Reference(name="os:scheduler_hints", type=model.Struct),
|
||||
description="scheduler hints description",
|
||||
fields={
|
||||
"group": model.StructField(
|
||||
data_type=model.ConstraintString(format="uuid"), min_ver="2.94"
|
||||
),
|
||||
"different_host": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="different_host", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="different_host", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"same_host": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="same_host", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="same_host", type=model.OneOfType),
|
||||
description="A list of server UUIDs or a server UUID.",
|
||||
min_ver="2.94",
|
||||
),
|
||||
@ -1028,9 +970,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
"different_cell": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="different_cell", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="different_cell", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"build_near_host_ip": model.StructField(
|
||||
@ -1114,9 +1054,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.OneOfType(
|
||||
reference=model.Reference(
|
||||
name="build_near_host_ip", type=model.OneOfType
|
||||
),
|
||||
reference=model.Reference(name="build_near_host_ip", type=model.OneOfType),
|
||||
kinds=[
|
||||
model.ConstraintString(format="ipv4"),
|
||||
model.ConstraintString(format="ipv6"),
|
||||
@ -1124,24 +1062,18 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
model.Struct(
|
||||
reference=model.Reference(
|
||||
name="OS-SCH-HNT:scheduler_hints", type=model.Struct
|
||||
),
|
||||
reference=model.Reference(name="OS-SCH-HNT:scheduler_hints", type=model.Struct),
|
||||
fields={
|
||||
"group": model.StructField(
|
||||
data_type=model.ConstraintString(format="uuid"), min_ver="2.94"
|
||||
),
|
||||
"different_host": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="different_host", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="different_host", type=model.OneOfType),
|
||||
description="A list of server UUIDs or a server UUID.\nSchedule the server on a different host from a set of servers.\nIt is available when `DifferentHostFilter` is available on cloud side.",
|
||||
min_ver="2.94",
|
||||
),
|
||||
"same_host": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="same_host", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="same_host", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"query": model.StructField(
|
||||
@ -1155,9 +1087,7 @@ EXPECTED_DATA_TYPES = [
|
||||
min_ver="2.94",
|
||||
),
|
||||
"different_cell": model.StructField(
|
||||
data_type=model.Reference(
|
||||
name="different_cell", type=model.OneOfType
|
||||
),
|
||||
data_type=model.Reference(name="different_cell", type=model.OneOfType),
|
||||
min_ver="2.94",
|
||||
),
|
||||
"build_near_host_ip": model.StructField(
|
||||
|
@ -27,11 +27,7 @@ class TestParserObject(TestCase):
|
||||
(res, all) = self.parser.parse(schema)
|
||||
self.assertEqual(
|
||||
model.Struct(
|
||||
fields={
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
}
|
||||
fields={"foo": model.StructField(data_type=model.ConstraintString())}
|
||||
),
|
||||
res,
|
||||
)
|
||||
@ -48,11 +44,7 @@ class TestParserObject(TestCase):
|
||||
(res, all) = self.parser.parse(schema)
|
||||
self.assertEqual(
|
||||
model.Struct(
|
||||
fields={
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
}
|
||||
fields={"foo": model.StructField(data_type=model.ConstraintString())}
|
||||
),
|
||||
res,
|
||||
)
|
||||
@ -69,11 +61,7 @@ class TestParserObject(TestCase):
|
||||
(res, all) = self.parser.parse(schema)
|
||||
self.assertEqual(
|
||||
model.Struct(
|
||||
fields={
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
},
|
||||
fields={"foo": model.StructField(data_type=model.ConstraintString())},
|
||||
additional_fields=model.PrimitiveAny(),
|
||||
),
|
||||
res,
|
||||
@ -91,11 +79,7 @@ class TestParserObject(TestCase):
|
||||
(res, all) = self.parser.parse(schema)
|
||||
self.assertEqual(
|
||||
model.Struct(
|
||||
fields={
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
},
|
||||
fields={"foo": model.StructField(data_type=model.ConstraintString())},
|
||||
additional_fields=model.ConstraintString(),
|
||||
),
|
||||
res,
|
||||
@ -208,11 +192,7 @@ class TestParserObject(TestCase):
|
||||
(res, all) = self.parser.parse(schema)
|
||||
self.assertEqual(
|
||||
model.Struct(
|
||||
fields={
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
},
|
||||
fields={"foo": model.StructField(data_type=model.ConstraintString())},
|
||||
pattern_properties={
|
||||
"^A": model.ConstraintString(),
|
||||
},
|
||||
@ -266,16 +246,12 @@ class TestParserObject(TestCase):
|
||||
kinds=[
|
||||
model.Struct(
|
||||
fields={
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
"foo": model.StructField(data_type=model.ConstraintString())
|
||||
},
|
||||
),
|
||||
model.Struct(
|
||||
fields={
|
||||
"bar": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
)
|
||||
"bar": model.StructField(data_type=model.ConstraintString())
|
||||
},
|
||||
),
|
||||
]
|
||||
@ -310,9 +286,7 @@ class TestParserObject(TestCase):
|
||||
"foo": model.StructField(
|
||||
data_type=model.ConstraintString(), is_required=True
|
||||
),
|
||||
"bar": model.StructField(
|
||||
data_type=model.ConstraintString()
|
||||
),
|
||||
"bar": model.StructField(data_type=model.ConstraintString()),
|
||||
},
|
||||
),
|
||||
res,
|
||||
|
@ -13,7 +13,6 @@

import os
import sys
import warnings

sys.path.insert(0, os.path.abspath("../.."))
sys.path.insert(0, os.path.abspath("."))
14
pyptoject.toml
Normal file
@ -0,0 +1,14 @@
[tool.ruff]
line-length = 79
target-version = "py38"

[tool.ruff.lint]
# enable the following rule classes:
#
# C4: https://docs.astral.sh/ruff/rules/#flake8-comprehensions-c4
# UP: https://docs.astral.sh/ruff/rules/#pyupgrade-up
select = ["C4", "UP"]
[tool.ruff.format]
quote-style = "preserve"
docstring-code-format = true
skip-magic-trailing-comma = true
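
The C4 (flake8-comprehensions) and UP (pyupgrade) rule classes selected above are auto-fixable, so running `ruff check --fix` with this configuration rewrites the flagged calls in place. A minimal sketch of the kind of change this produces (illustrative only, not taken from this change; the file and names are made up):

    # example.py, before the fix: C408 (unnecessary dict() call)
    # and C405 (set() over a list literal) findings
    kwargs = dict()
    names = set(["flavor", "server"])

    # after `ruff check --fix`: the calls are rewritten as literals
    kwargs = {}
    names = {"flavor", "server"}

The line re-wrapping visible in the Python hunks above comes from the separate `ruff format` step rather than from these lint rules.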