From 712f64344f0c1f3624101bbff41723a77735921f Mon Sep 17 00:00:00 2001
From: SDK Generator Bot
Date: Thu, 7 Aug 2025 10:14:07 +0000
Subject: [PATCH] Generate observability

---
 .../src/stackit/observability/__init__.py      |  4 +
 .../stackit/observability/api/default_api.py   | 29 +++++++
 .../stackit/observability/models/__init__.py   |  3 +
 .../models/create_credentials_payload.py       | 84 +++++++++++++++++++
 .../observability/models/credentials.py        |  7 +-
 .../observability/models/service_keys_list.py  | 10 ++-
 ...pdate_metrics_storage_retention_payload.py  |  6 +-
 7 files changed, 136 insertions(+), 7 deletions(-)
 create mode 100644 services/observability/src/stackit/observability/models/create_credentials_payload.py

diff --git a/services/observability/src/stackit/observability/__init__.py b/services/observability/src/stackit/observability/__init__.py
index ed4a582de..92727d97e 100644
--- a/services/observability/src/stackit/observability/__init__.py
+++ b/services/observability/src/stackit/observability/__init__.py
@@ -47,6 +47,7 @@
     "CreateAlertConfigRoutePayloadRoutesInner",
     "CreateAlertgroupsPayload",
     "CreateAlertrulesPayload",
+    "CreateCredentialsPayload",
     "CreateCredentialsResponse",
     "CreateInstancePayload",
     "CreateInstanceResponse",
@@ -189,6 +190,9 @@
 from stackit.observability.models.create_alertrules_payload import (
     CreateAlertrulesPayload as CreateAlertrulesPayload,
 )
+from stackit.observability.models.create_credentials_payload import (
+    CreateCredentialsPayload as CreateCredentialsPayload,
+)
 from stackit.observability.models.create_credentials_response import (
     CreateCredentialsResponse as CreateCredentialsResponse,
 )
diff --git a/services/observability/src/stackit/observability/api/default_api.py b/services/observability/src/stackit/observability/api/default_api.py
index 2752f1dd4..97239012d 100644
--- a/services/observability/src/stackit/observability/api/default_api.py
+++ b/services/observability/src/stackit/observability/api/default_api.py
@@ -41,6 +41,9 @@
 from stackit.observability.models.create_alertrules_payload import (
     CreateAlertrulesPayload,
 )
+from stackit.observability.models.create_credentials_payload import (
+    CreateCredentialsPayload,
+)
 from stackit.observability.models.create_credentials_response import (
     CreateCredentialsResponse,
 )
@@ -1248,6 +1251,7 @@ def create_credentials(
         self,
         instance_id: StrictStr,
         project_id: StrictStr,
+        create_credentials_payload: Optional[CreateCredentialsPayload] = None,
         _request_timeout: Union[
             None,
             Annotated[StrictFloat, Field(gt=0)],
@@ -1266,6 +1270,8 @@ def create_credentials(
         :type instance_id: str
         :param project_id: (required)
         :type project_id: str
+        :param create_credentials_payload:
+        :type create_credentials_payload: CreateCredentialsPayload
         :param _request_timeout: timeout setting for this request. If one
                                  number provided, it will be total request
                                  timeout. It can also be a pair (tuple) of
@@ -1291,6 +1297,7 @@ def create_credentials(
         _param = self._create_credentials_serialize(
             instance_id=instance_id,
             project_id=project_id,
+            create_credentials_payload=create_credentials_payload,
             _request_auth=_request_auth,
             _content_type=_content_type,
             _headers=_headers,
@@ -1299,6 +1306,7 @@ def create_credentials(
 
         _response_types_map: Dict[str, Optional[str]] = {
             "201": "CreateCredentialsResponse",
+            "400": "Error",
             "403": "PermissionDenied",
         }
         response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
@@ -1313,6 +1321,7 @@ def create_credentials_with_http_info(
         self,
         instance_id: StrictStr,
         project_id: StrictStr,
+        create_credentials_payload: Optional[CreateCredentialsPayload] = None,
         _request_timeout: Union[
             None,
             Annotated[StrictFloat, Field(gt=0)],
@@ -1331,6 +1340,8 @@ def create_credentials_with_http_info(
         :type instance_id: str
         :param project_id: (required)
         :type project_id: str
+        :param create_credentials_payload:
+        :type create_credentials_payload: CreateCredentialsPayload
         :param _request_timeout: timeout setting for this request. If one
                                  number provided, it will be total request
                                  timeout. It can also be a pair (tuple) of
@@ -1356,6 +1367,7 @@ def create_credentials_with_http_info(
         _param = self._create_credentials_serialize(
             instance_id=instance_id,
             project_id=project_id,
+            create_credentials_payload=create_credentials_payload,
             _request_auth=_request_auth,
             _content_type=_content_type,
             _headers=_headers,
@@ -1364,6 +1376,7 @@ def create_credentials_with_http_info(
 
         _response_types_map: Dict[str, Optional[str]] = {
             "201": "CreateCredentialsResponse",
+            "400": "Error",
             "403": "PermissionDenied",
         }
         response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
@@ -1378,6 +1391,7 @@ def create_credentials_without_preload_content(
         self,
         instance_id: StrictStr,
         project_id: StrictStr,
+        create_credentials_payload: Optional[CreateCredentialsPayload] = None,
         _request_timeout: Union[
             None,
             Annotated[StrictFloat, Field(gt=0)],
@@ -1396,6 +1410,8 @@ def create_credentials_without_preload_content(
         :type instance_id: str
         :param project_id: (required)
         :type project_id: str
+        :param create_credentials_payload:
+        :type create_credentials_payload: CreateCredentialsPayload
         :param _request_timeout: timeout setting for this request. If one
                                  number provided, it will be total request
                                  timeout. It can also be a pair (tuple) of
@@ -1421,6 +1437,7 @@ def create_credentials_without_preload_content(
         _param = self._create_credentials_serialize(
             instance_id=instance_id,
             project_id=project_id,
+            create_credentials_payload=create_credentials_payload,
             _request_auth=_request_auth,
             _content_type=_content_type,
             _headers=_headers,
@@ -1429,6 +1446,7 @@ def create_credentials_without_preload_content(
 
         _response_types_map: Dict[str, Optional[str]] = {
             "201": "CreateCredentialsResponse",
+            "400": "Error",
             "403": "PermissionDenied",
         }
         response_data = self.api_client.call_api(*_param, _request_timeout=_request_timeout)
@@ -1438,6 +1456,7 @@ def _create_credentials_serialize(
         self,
         instance_id,
         project_id,
+        create_credentials_payload,
         _request_auth,
         _content_type,
         _headers,
@@ -1464,11 +1483,21 @@ def _create_credentials_serialize(
         # process the header parameters
         # process the form parameters
         # process the body parameter
+        if create_credentials_payload is not None:
+            _body_params = create_credentials_payload
 
         # set the HTTP header `Accept`
         if "Accept" not in _header_params:
             _header_params["Accept"] = self.api_client.select_header_accept(["application/json"])
 
+        # set the HTTP header `Content-Type`
+        if _content_type:
+            _header_params["Content-Type"] = _content_type
+        else:
+            _default_content_type = self.api_client.select_header_content_type(["application/json"])
+            if _default_content_type is not None:
+                _header_params["Content-Type"] = _default_content_type
+
         # authentication setting
         _auth_settings: List[str] = []
 
diff --git a/services/observability/src/stackit/observability/models/__init__.py b/services/observability/src/stackit/observability/models/__init__.py
index 92929947a..a03349281 100644
--- a/services/observability/src/stackit/observability/models/__init__.py
+++ b/services/observability/src/stackit/observability/models/__init__.py
@@ -53,6 +53,9 @@
 from stackit.observability.models.create_alertrules_payload import (
     CreateAlertrulesPayload,
 )
+from stackit.observability.models.create_credentials_payload import (
+    CreateCredentialsPayload,
+)
 from stackit.observability.models.create_credentials_response import (
     CreateCredentialsResponse,
 )
diff --git a/services/observability/src/stackit/observability/models/create_credentials_payload.py b/services/observability/src/stackit/observability/models/create_credentials_payload.py
new file mode 100644
index 000000000..bb85c9fd2
--- /dev/null
+++ b/services/observability/src/stackit/observability/models/create_credentials_payload.py
@@ -0,0 +1,84 @@
+# coding: utf-8
+
+"""
+    STACKIT Observability API
+
+    API endpoints for Observability on STACKIT
+
+    The version of the OpenAPI document: 1.1.1
+    Contact: stackit-argus@mail.schwarz
+    Generated by OpenAPI Generator (https://openapi-generator.tech)
+
+    Do not edit the class manually.
+"""  # noqa: E501
+
+from __future__ import annotations
+
+import json
+import pprint
+from typing import Any, ClassVar, Dict, List, Optional, Set
+
+from pydantic import BaseModel, ConfigDict, Field
+from typing_extensions import Annotated, Self
+
+
+class CreateCredentialsPayload(BaseModel):
+    """
+    Create new credentials with (optional) description
+    """  # noqa: E501
+
+    description: Optional[Annotated[str, Field(min_length=1, strict=True, max_length=1000)]] = Field(
+        default=None, description="description"
+    )
+    __properties: ClassVar[List[str]] = ["description"]
+
+    model_config = ConfigDict(
+        populate_by_name=True,
+        validate_assignment=True,
+        protected_namespaces=(),
+    )
+
+    def to_str(self) -> str:
+        """Returns the string representation of the model using alias"""
+        return pprint.pformat(self.model_dump(by_alias=True))
+
+    def to_json(self) -> str:
+        """Returns the JSON representation of the model using alias"""
+        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
+        return json.dumps(self.to_dict())
+
+    @classmethod
+    def from_json(cls, json_str: str) -> Optional[Self]:
+        """Create an instance of CreateCredentialsPayload from a JSON string"""
+        return cls.from_dict(json.loads(json_str))
+
+    def to_dict(self) -> Dict[str, Any]:
+        """Return the dictionary representation of the model using alias.
+
+        This has the following differences from calling pydantic's
+        `self.model_dump(by_alias=True)`:
+
+        * `None` is only added to the output dict for nullable fields that
+          were set at model initialization. Other fields with value `None`
+          are ignored.
+        """
+        excluded_fields: Set[str] = set([])
+
+        _dict = self.model_dump(
+            by_alias=True,
+            exclude=excluded_fields,
+            exclude_none=True,
+        )
+        return _dict
+
+    @classmethod
+    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
+        """Create an instance of CreateCredentialsPayload from a dict"""
+        if obj is None:
+            return None
+
+        if not isinstance(obj, dict):
+            return cls.model_validate(obj)
+
+        _obj = cls.model_validate({"description": obj.get("description")})
+        return _obj
diff --git a/services/observability/src/stackit/observability/models/credentials.py b/services/observability/src/stackit/observability/models/credentials.py
index 5836e6466..53b479621 100644
--- a/services/observability/src/stackit/observability/models/credentials.py
+++ b/services/observability/src/stackit/observability/models/credentials.py
@@ -27,9 +27,10 @@ class Credentials(BaseModel):
     Credentials
     """  # noqa: E501
 
+    description: Optional[Annotated[str, Field(min_length=1, strict=True, max_length=1000)]] = None
     password: Annotated[str, Field(min_length=1, strict=True)]
     username: Annotated[str, Field(min_length=1, strict=True)]
-    __properties: ClassVar[List[str]] = ["password", "username"]
+    __properties: ClassVar[List[str]] = ["description", "password", "username"]
 
     model_config = ConfigDict(
         populate_by_name=True,
@@ -79,5 +80,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
         if not isinstance(obj, dict):
             return cls.model_validate(obj)
 
-        _obj = cls.model_validate({"password": obj.get("password"), "username": obj.get("username")})
+        _obj = cls.model_validate(
+            {"description": obj.get("description"), "password": obj.get("password"), "username": obj.get("username")}
+        )
         return _obj
diff --git a/services/observability/src/stackit/observability/models/service_keys_list.py b/services/observability/src/stackit/observability/models/service_keys_list.py
index bad3ed687..dc118a509 100644
--- a/services/observability/src/stackit/observability/models/service_keys_list.py
+++ b/services/observability/src/stackit/observability/models/service_keys_list.py
@@ -28,9 +28,10 @@ class ServiceKeysList(BaseModel):
     """  # noqa: E501
 
     credentials_info: Optional[Dict[str, Optional[StrictStr]]] = Field(default=None, alias="credentialsInfo")
+    description: Optional[Annotated[str, Field(min_length=1, strict=True, max_length=1000)]] = None
     id: Annotated[str, Field(min_length=1, strict=True, max_length=200)]
     name: Annotated[str, Field(min_length=1, strict=True, max_length=200)]
-    __properties: ClassVar[List[str]] = ["credentialsInfo", "id", "name"]
+    __properties: ClassVar[List[str]] = ["credentialsInfo", "description", "id", "name"]
 
     model_config = ConfigDict(
         populate_by_name=True,
@@ -81,6 +82,11 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
             return cls.model_validate(obj)
 
         _obj = cls.model_validate(
-            {"credentialsInfo": obj.get("credentialsInfo"), "id": obj.get("id"), "name": obj.get("name")}
+            {
+                "credentialsInfo": obj.get("credentialsInfo"),
+                "description": obj.get("description"),
+                "id": obj.get("id"),
+                "name": obj.get("name"),
+            }
         )
         return _obj
diff --git a/services/observability/src/stackit/observability/models/update_metrics_storage_retention_payload.py b/services/observability/src/stackit/observability/models/update_metrics_storage_retention_payload.py
index 589a43274..e9d130e66 100644
--- a/services/observability/src/stackit/observability/models/update_metrics_storage_retention_payload.py
+++ b/services/observability/src/stackit/observability/models/update_metrics_storage_retention_payload.py
@@ -28,15 +28,15 @@ class UpdateMetricsStorageRetentionPayload(BaseModel):
     """  # noqa: E501
 
     metrics_retention_time1h: Annotated[str, Field(min_length=2, strict=True, max_length=8)] = Field(
-        description="Retention time of longtime storage of 1h sampled data. After that time the data will be deleted permanently. `Additional Validators:` * Should be a valid time string * Should not be bigger than metricsRetentionTime5m",
+        description="Retention time of longtime storage of 1h sampled data. After that time the 1h sampled data will be deleted permanently. The goal of downsampling is to get fast results for queries over long time intervals. It is recommended to set this value to be the same as metricsRetentionTimeRaw to ensure zoom-in capabilities in your dashboards. The default value is 90 days. `Additional Validators:` * Should be a valid time string (e.g. '90d'). * Should be between '10d' and '780d'. * Note: For compatibility reasons, values between '0d' and '792d' are also accepted. However, these will be automatically adjusted in the backend to the recommended range of '10d' to '780d'.",
         alias="metricsRetentionTime1h",
     )
     metrics_retention_time5m: Annotated[str, Field(min_length=2, strict=True, max_length=8)] = Field(
-        description="Retention time of longtime storage of 5m sampled data. After that time the data will be down sampled to 1h. `Additional Validators:` * Should be a valid time string * Should not be bigger than metricsRetentionTimeRaw",
+        description="Retention time of longtime storage of 5m sampled data. After that time the 5m sampled data will be deleted permanently. All 5m resolution metrics older than 10 days are downsampled at a 1h resolution. The goal of downsampling is to get fast results for queries over long time intervals. It is recommended to set this value to be the same as metricsRetentionTimeRaw to ensure zoom-in capabilities in your dashboards. The default value is 90 days. `Additional Validators:` * Should be a valid time string (e.g. '90d'). * Should be between '10d' and '780d'. * Note: For compatibility reasons, values between '0d' and '792d' are also accepted. However, these will be automatically adjusted in the backend to the recommended range of '10d' to '780d'.",
         alias="metricsRetentionTime5m",
     )
     metrics_retention_time_raw: Annotated[str, Field(min_length=2, strict=True, max_length=8)] = Field(
-        description="Retention time of longtime storage of raw sampled data. After that time the data will be down sampled to 5m. Keep in mind, that the initial goal of downsampling is not saving disk or object storage space. In fact, downsampling doesn't save you any space but instead, it adds 2 more blocks for each raw block which are only slightly smaller or relatively similar size to raw block. This is done by internal downsampling implementation which to be mathematically correct holds various aggregations. This means that downsampling can increase the size of your storage a bit (~3x), if you choose to store all resolutions (recommended). The goal of downsampling is to provide an opportunity to get fast results for range queries of big time intervals like months or years. `Additional Validators:` * Should be a valid time string * Should not be bigger than 13 months",
+        description="Retention time of longtime storage of raw data. After that time the raw data will be deleted permanently. All raw resolution metrics that are older than 40 hours are downsampled at a 5m resolution. The default value is 90 days. `Additional Validators:` * Should be a valid time string (e.g. '90d'). * Should be between '2d' and '780d'. * Note: For compatibility reasons, values between '0d' and '792d' are also accepted. However, these will be automatically adjusted in the backend to the recommended range of '2d' to '780d'.",
         alias="metricsRetentionTimeRaw",
     )
     __properties: ClassVar[List[str]] = ["metricsRetentionTime1h", "metricsRetentionTime5m", "metricsRetentionTimeRaw"]
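
Reviewer note (not part of the generated patch): the sketch below shows how the regenerated credentials surface might be used. Only the CreateCredentialsPayload model and the new optional create_credentials_payload parameter of create_credentials are taken from the diff above; the DefaultApi entry point, its configuration, and the project/instance IDs are placeholder assumptions.

    from stackit.observability.api.default_api import DefaultApi
    from stackit.observability.models.create_credentials_payload import (
        CreateCredentialsPayload,
    )

    # Assumption: the client picks up configuration/authentication elsewhere.
    api = DefaultApi()

    # New in this patch: credentials can carry an optional description (1-1000 characters).
    payload = CreateCredentialsPayload(description="read-only key for the Grafana data source")
    credentials = api.create_credentials(
        instance_id="my-instance-id",  # placeholder
        project_id="my-project-id",  # placeholder
        create_credentials_payload=payload,  # optional; omit to keep the previous request shape
    )
    print(credentials)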
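
Likewise, a minimal sketch of the retention payload whose field documentation changes above; the update endpoint itself is not touched by this diff, so only the model construction is shown, with values inside the documented ranges.

    from stackit.observability.models.update_metrics_storage_retention_payload import (
        UpdateMetricsStorageRetentionPayload,
    )

    # Per the updated descriptions: the default is 90 days, raw accepts '2d'-'780d',
    # while the 5m and 1h retentions accept '10d'-'780d'.
    retention = UpdateMetricsStorageRetentionPayload(
        metrics_retention_time_raw="90d",
        metrics_retention_time5m="90d",
        metrics_retention_time1h="90d",
    )
    print(retention.to_json())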