# coding: utf-8
"""
Airflow API
Airflow API. All endpoints located under ``/api/v2`` can be used safely, are stable and backward compatible. Endpoints located under ``/ui`` are dedicated to the UI and are subject to breaking change depending on the need of the frontend. Users should not rely on those but use the public ones instead.
The version of the OpenAPI document: 2
Generated by OpenAPI Generator (https://openapi-generator.tech)
Do not edit the class manually.
""" # noqa: E501
from __future__ import annotations
import pprint
import re # noqa: F401
import json
from datetime import datetime
from pydantic import BaseModel, ConfigDict, Field, StrictStr
from typing import Any, ClassVar, Dict, List, Optional
from typing_extensions import Annotated
from airflow_client.client.models.dag_run_state import DagRunState
from typing import Optional, Set
from typing_extensions import Self
class DAGRunsBatchBody(BaseModel):
    """
    List DAG Runs body for batch endpoint.

    Request payload for querying DAG runs in bulk: optional DAG-id and state
    filters, open/closed datetime range bounds (``*_gt``/``*_gte``/``*_lt``/
    ``*_lte``) on the end, logical, run-after, and start dates, an ordering
    key, and pagination (``page_limit`` defaults to 100, ``page_offset``
    to 0).
    """ # noqa: E501
    dag_ids: Optional[List[StrictStr]] = None
    end_date_gt: Optional[datetime] = None
    end_date_gte: Optional[datetime] = None
    end_date_lt: Optional[datetime] = None
    end_date_lte: Optional[datetime] = None
    logical_date_gt: Optional[datetime] = None
    logical_date_gte: Optional[datetime] = None
    logical_date_lt: Optional[datetime] = None
    logical_date_lte: Optional[datetime] = None
    order_by: Optional[StrictStr] = None
    page_limit: Optional[Annotated[int, Field(strict=True, ge=0)]] = 100
    page_offset: Optional[Annotated[int, Field(strict=True, ge=0)]] = 0
    run_after_gt: Optional[datetime] = None
    run_after_gte: Optional[datetime] = None
    run_after_lt: Optional[datetime] = None
    run_after_lte: Optional[datetime] = None
    start_date_gt: Optional[datetime] = None
    start_date_gte: Optional[datetime] = None
    start_date_lt: Optional[datetime] = None
    start_date_lte: Optional[datetime] = None
    states: Optional[List[Optional[DagRunState]]] = None
    # Canonical field/alias order used by the (de)serialization helpers below.
    __properties: ClassVar[List[str]] = ["dag_ids", "end_date_gt", "end_date_gte", "end_date_lt", "end_date_lte", "logical_date_gt", "logical_date_gte", "logical_date_lt", "logical_date_lte", "order_by", "page_limit", "page_offset", "run_after_gt", "run_after_gte", "run_after_lt", "run_after_lte", "start_date_gt", "start_date_gte", "start_date_lt", "start_date_lte", "states"]

    model_config = ConfigDict(
        populate_by_name=True,
        validate_assignment=True,
        protected_namespaces=(),
    )

    def to_str(self) -> str:
        """Returns the string representation of the model using alias"""
        return pprint.pformat(self.model_dump(by_alias=True))

    def to_json(self) -> str:
        """Returns the JSON representation of the model using alias"""
        # TODO: pydantic v2: use .model_dump_json(by_alias=True, exclude_unset=True) instead
        return json.dumps(self.to_dict())

    @classmethod
    def from_json(cls, json_str: str) -> Optional[Self]:
        """Create an instance of DAGRunsBatchBody from a JSON string"""
        return cls.from_dict(json.loads(json_str))

    def to_dict(self) -> Dict[str, Any]:
        """Return the dictionary representation of the model using alias.

        This differs from calling pydantic's ``self.model_dump(by_alias=True)``
        in that every field whose current value is ``None`` is omitted from the
        output (``exclude_none=True``), regardless of whether it was explicitly
        set at model initialization.
        """
        # No fields are excluded by name for this model; the set is kept for
        # symmetry with other generated models.
        excluded_fields: Set[str] = set([
        ])

        _dict = self.model_dump(
            by_alias=True,
            exclude=excluded_fields,
            exclude_none=True,
        )
        return _dict

    @classmethod
    def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]:
        """Create an instance of DAGRunsBatchBody from a dict"""
        if obj is None:
            return None

        if not isinstance(obj, dict):
            # Not a mapping: hand the value to pydantic directly and let it
            # raise a ValidationError if it cannot be coerced.
            return cls.model_validate(obj)

        _obj = cls.model_validate({
            "dag_ids": obj.get("dag_ids"),
            "end_date_gt": obj.get("end_date_gt"),
            "end_date_gte": obj.get("end_date_gte"),
            "end_date_lt": obj.get("end_date_lt"),
            "end_date_lte": obj.get("end_date_lte"),
            "logical_date_gt": obj.get("logical_date_gt"),
            "logical_date_gte": obj.get("logical_date_gte"),
            "logical_date_lt": obj.get("logical_date_lt"),
            "logical_date_lte": obj.get("logical_date_lte"),
            "order_by": obj.get("order_by"),
            # Fall back to the schema defaults when the key is absent OR
            # explicitly null, so a round-trip through to_dict() is stable.
            "page_limit": obj.get("page_limit") if obj.get("page_limit") is not None else 100,
            "page_offset": obj.get("page_offset") if obj.get("page_offset") is not None else 0,
            "run_after_gt": obj.get("run_after_gt"),
            "run_after_gte": obj.get("run_after_gte"),
            "run_after_lt": obj.get("run_after_lt"),
            "run_after_lte": obj.get("run_after_lte"),
            "start_date_gt": obj.get("start_date_gt"),
            "start_date_gte": obj.get("start_date_gte"),
            "start_date_lt": obj.get("start_date_lt"),
            "start_date_lte": obj.get("start_date_lte"),
            "states": obj.get("states")
        })
        return _obj