Update Python Client to 3.1.6rc1 (#142)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 095211f..b5e9aa8 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -17,12 +17,16 @@
  under the License.
  -->
 
-# v3.1.4
+# v3.1.6
 
 ## Bug Fixes:
 
  - Update refresh token flow ([#55506](https://github.com/apache/airflow/pull/55506))
- - Patch pools should have an optional description ([#58066](https://github.com/apache/airflow/pull/58066))
+ - Patch pools should have an optional description ([#58066](https://github.com/apache/airflow/pull/58066)) ([#59728](https://github.com/apache/airflow/pull/59728))
+ - Add task group ID filtering support to task instance query ([#59511](https://github.com/apache/airflow/pull/59511))
+ - Fix backfill run_on_latest_version defaulting to False instead of True ([#59328](https://github.com/apache/airflow/pull/59328))
+ - Update refresh token flow ([#58649](https://github.com/apache/airflow/pull/58649))
+ - Patch pools should have an optional description ([#58169](https://github.com/apache/airflow/pull/58169))
 
 # v3.1.3
 
diff --git a/airflow_client/client/__init__.py b/airflow_client/client/__init__.py
index b47b50a..d3bc15d 100644
--- a/airflow_client/client/__init__.py
+++ b/airflow_client/client/__init__.py
@@ -14,193 +14,381 @@
 """  # noqa: E501
 
 
-__version__ = "3.1.4"
+__version__ = "3.1.6"
+
+# Define package exports
+__all__ = [
+    "AssetApi",
+    "BackfillApi",
+    "ConfigApi",
+    "ConnectionApi",
+    "DAGApi",
+    "DAGParsingApi",
+    "DagRunApi",
+    "DagSourceApi",
+    "DagStatsApi",
+    "DagVersionApi",
+    "DagWarningApi",
+    "EventLogApi",
+    "ExtraLinksApi",
+    "ImportErrorApi",
+    "JobApi",
+    "LoginApi",
+    "MonitorApi",
+    "PluginApi",
+    "PoolApi",
+    "ProviderApi",
+    "TaskApi",
+    "TaskInstanceApi",
+    "VariableApi",
+    "VersionApi",
+    "XComApi",
+    "ExperimentalApi",
+    "ApiResponse",
+    "ApiClient",
+    "Configuration",
+    "OpenApiException",
+    "ApiTypeError",
+    "ApiValueError",
+    "ApiKeyError",
+    "ApiAttributeError",
+    "ApiException",
+    "AppBuilderMenuItemResponse",
+    "AppBuilderViewResponse",
+    "AssetAliasCollectionResponse",
+    "AssetAliasResponse",
+    "AssetCollectionResponse",
+    "AssetEventCollectionResponse",
+    "AssetEventResponse",
+    "AssetResponse",
+    "BackfillCollectionResponse",
+    "BackfillPostBody",
+    "BackfillResponse",
+    "BaseInfoResponse",
+    "BulkActionNotOnExistence",
+    "BulkActionOnExistence",
+    "BulkActionResponse",
+    "BulkBodyBulkTaskInstanceBody",
+    "BulkBodyBulkTaskInstanceBodyActionsInner",
+    "BulkBodyConnectionBody",
+    "BulkBodyConnectionBodyActionsInner",
+    "BulkBodyPoolBody",
+    "BulkBodyPoolBodyActionsInner",
+    "BulkBodyVariableBody",
+    "BulkBodyVariableBodyActionsInner",
+    "BulkCreateActionBulkTaskInstanceBody",
+    "BulkCreateActionConnectionBody",
+    "BulkCreateActionPoolBody",
+    "BulkCreateActionVariableBody",
+    "BulkDeleteActionBulkTaskInstanceBody",
+    "BulkDeleteActionBulkTaskInstanceBodyEntitiesInner",
+    "BulkDeleteActionConnectionBody",
+    "BulkDeleteActionPoolBody",
+    "BulkDeleteActionVariableBody",
+    "BulkResponse",
+    "BulkTaskInstanceBody",
+    "BulkUpdateActionBulkTaskInstanceBody",
+    "BulkUpdateActionConnectionBody",
+    "BulkUpdateActionPoolBody",
+    "BulkUpdateActionVariableBody",
+    "ClearTaskInstancesBody",
+    "ClearTaskInstancesBodyTaskIdsInner",
+    "Config",
+    "ConfigOption",
+    "ConfigSection",
+    "ConnectionBody",
+    "ConnectionCollectionResponse",
+    "ConnectionResponse",
+    "ConnectionTestResponse",
+    "Content",
+    "CreateAssetEventsBody",
+    "DAGCollectionResponse",
+    "DAGDetailsResponse",
+    "DAGPatchBody",
+    "DAGResponse",
+    "DAGRunClearBody",
+    "DAGRunCollectionResponse",
+    "DAGRunPatchBody",
+    "DAGRunPatchStates",
+    "DAGRunResponse",
+    "DAGRunsBatchBody",
+    "DAGSourceResponse",
+    "DAGTagCollectionResponse",
+    "DAGVersionCollectionResponse",
+    "DAGWarningCollectionResponse",
+    "DAGWarningResponse",
+    "DagProcessorInfoResponse",
+    "DagRunAssetReference",
+    "DagRunState",
+    "DagRunTriggeredByType",
+    "DagRunType",
+    "DagScheduleAssetReference",
+    "DagStatsCollectionResponse",
+    "DagStatsResponse",
+    "DagStatsStateResponse",
+    "DagTagResponse",
+    "DagVersionResponse",
+    "DagWarningType",
+    "Detail",
+    "DryRunBackfillCollectionResponse",
+    "DryRunBackfillResponse",
+    "EventLogCollectionResponse",
+    "EventLogResponse",
+    "ExternalLogUrlResponse",
+    "ExternalViewResponse",
+    "ExtraLinkCollectionResponse",
+    "FastAPIAppResponse",
+    "FastAPIRootMiddlewareResponse",
+    "HITLDetail",
+    "HITLDetailCollection",
+    "HITLDetailResponse",
+    "HITLUser",
+    "HTTPExceptionResponse",
+    "HTTPValidationError",
+    "HealthInfoResponse",
+    "ImportErrorCollectionResponse",
+    "ImportErrorResponse",
+    "JobCollectionResponse",
+    "JobResponse",
+    "LastAssetEventResponse",
+    "PatchTaskInstanceBody",
+    "PluginCollectionResponse",
+    "PluginImportErrorCollectionResponse",
+    "PluginImportErrorResponse",
+    "PluginResponse",
+    "PoolBody",
+    "PoolCollectionResponse",
+    "PoolPatchBody",
+    "PoolResponse",
+    "ProviderCollectionResponse",
+    "ProviderResponse",
+    "QueuedEventCollectionResponse",
+    "QueuedEventResponse",
+    "ReactAppResponse",
+    "ReprocessBehavior",
+    "ResponseClearDagRun",
+    "ResponseGetXcomEntry",
+    "SchedulerInfoResponse",
+    "StructuredLogMessage",
+    "TaskCollectionResponse",
+    "TaskDependencyCollectionResponse",
+    "TaskDependencyResponse",
+    "TaskInletAssetReference",
+    "TaskInstanceCollectionResponse",
+    "TaskInstanceHistoryCollectionResponse",
+    "TaskInstanceHistoryResponse",
+    "TaskInstanceResponse",
+    "TaskInstanceState",
+    "TaskInstancesBatchBody",
+    "TaskInstancesLogResponse",
+    "TaskOutletAssetReference",
+    "TaskResponse",
+    "TimeDelta",
+    "TriggerDAGRunPostBody",
+    "TriggerResponse",
+    "TriggererInfoResponse",
+    "UpdateHITLDetailPayload",
+    "ValidationError",
+    "ValidationErrorLocInner",
+    "Value",
+    "VariableBody",
+    "VariableCollectionResponse",
+    "VariableResponse",
+    "VersionInfo",
+    "XComCollectionResponse",
+    "XComCreateBody",
+    "XComResponse",
+    "XComResponseNative",
+    "XComResponseString",
+    "XComUpdateBody",
+]
 
 # import apis into sdk package
-from airflow_client.client.api.asset_api import AssetApi
-from airflow_client.client.api.backfill_api import BackfillApi
-from airflow_client.client.api.config_api import ConfigApi
-from airflow_client.client.api.connection_api import ConnectionApi
-from airflow_client.client.api.dag_api import DAGApi
-from airflow_client.client.api.dag_parsing_api import DAGParsingApi
-from airflow_client.client.api.dag_run_api import DagRunApi
-from airflow_client.client.api.dag_source_api import DagSourceApi
-from airflow_client.client.api.dag_stats_api import DagStatsApi
-from airflow_client.client.api.dag_version_api import DagVersionApi
-from airflow_client.client.api.dag_warning_api import DagWarningApi
-from airflow_client.client.api.event_log_api import EventLogApi
-from airflow_client.client.api.extra_links_api import ExtraLinksApi
-from airflow_client.client.api.import_error_api import ImportErrorApi
-from airflow_client.client.api.job_api import JobApi
-from airflow_client.client.api.login_api import LoginApi
-from airflow_client.client.api.monitor_api import MonitorApi
-from airflow_client.client.api.plugin_api import PluginApi
-from airflow_client.client.api.pool_api import PoolApi
-from airflow_client.client.api.provider_api import ProviderApi
-from airflow_client.client.api.task_api import TaskApi
-from airflow_client.client.api.task_instance_api import TaskInstanceApi
-from airflow_client.client.api.variable_api import VariableApi
-from airflow_client.client.api.version_api import VersionApi
-from airflow_client.client.api.x_com_api import XComApi
-from airflow_client.client.api.experimental_api import ExperimentalApi
+from airflow_client.client.api.asset_api import AssetApi as AssetApi
+from airflow_client.client.api.backfill_api import BackfillApi as BackfillApi
+from airflow_client.client.api.config_api import ConfigApi as ConfigApi
+from airflow_client.client.api.connection_api import ConnectionApi as ConnectionApi
+from airflow_client.client.api.dag_api import DAGApi as DAGApi
+from airflow_client.client.api.dag_parsing_api import DAGParsingApi as DAGParsingApi
+from airflow_client.client.api.dag_run_api import DagRunApi as DagRunApi
+from airflow_client.client.api.dag_source_api import DagSourceApi as DagSourceApi
+from airflow_client.client.api.dag_stats_api import DagStatsApi as DagStatsApi
+from airflow_client.client.api.dag_version_api import DagVersionApi as DagVersionApi
+from airflow_client.client.api.dag_warning_api import DagWarningApi as DagWarningApi
+from airflow_client.client.api.event_log_api import EventLogApi as EventLogApi
+from airflow_client.client.api.extra_links_api import ExtraLinksApi as ExtraLinksApi
+from airflow_client.client.api.import_error_api import ImportErrorApi as ImportErrorApi
+from airflow_client.client.api.job_api import JobApi as JobApi
+from airflow_client.client.api.login_api import LoginApi as LoginApi
+from airflow_client.client.api.monitor_api import MonitorApi as MonitorApi
+from airflow_client.client.api.plugin_api import PluginApi as PluginApi
+from airflow_client.client.api.pool_api import PoolApi as PoolApi
+from airflow_client.client.api.provider_api import ProviderApi as ProviderApi
+from airflow_client.client.api.task_api import TaskApi as TaskApi
+from airflow_client.client.api.task_instance_api import TaskInstanceApi as TaskInstanceApi
+from airflow_client.client.api.variable_api import VariableApi as VariableApi
+from airflow_client.client.api.version_api import VersionApi as VersionApi
+from airflow_client.client.api.x_com_api import XComApi as XComApi
+from airflow_client.client.api.experimental_api import ExperimentalApi as ExperimentalApi
 
 # import ApiClient
-from airflow_client.client.api_response import ApiResponse
-from airflow_client.client.api_client import ApiClient
-from airflow_client.client.configuration import Configuration
-from airflow_client.client.exceptions import OpenApiException
-from airflow_client.client.exceptions import ApiTypeError
-from airflow_client.client.exceptions import ApiValueError
-from airflow_client.client.exceptions import ApiKeyError
-from airflow_client.client.exceptions import ApiAttributeError
-from airflow_client.client.exceptions import ApiException
+from airflow_client.client.api_response import ApiResponse as ApiResponse
+from airflow_client.client.api_client import ApiClient as ApiClient
+from airflow_client.client.configuration import Configuration as Configuration
+from airflow_client.client.exceptions import OpenApiException as OpenApiException
+from airflow_client.client.exceptions import ApiTypeError as ApiTypeError
+from airflow_client.client.exceptions import ApiValueError as ApiValueError
+from airflow_client.client.exceptions import ApiKeyError as ApiKeyError
+from airflow_client.client.exceptions import ApiAttributeError as ApiAttributeError
+from airflow_client.client.exceptions import ApiException as ApiException
 
 # import models into sdk package
-from airflow_client.client.models.app_builder_menu_item_response import AppBuilderMenuItemResponse
-from airflow_client.client.models.app_builder_view_response import AppBuilderViewResponse
-from airflow_client.client.models.asset_alias_collection_response import AssetAliasCollectionResponse
-from airflow_client.client.models.asset_alias_response import AssetAliasResponse
-from airflow_client.client.models.asset_collection_response import AssetCollectionResponse
-from airflow_client.client.models.asset_event_collection_response import AssetEventCollectionResponse
-from airflow_client.client.models.asset_event_response import AssetEventResponse
-from airflow_client.client.models.asset_response import AssetResponse
-from airflow_client.client.models.backfill_collection_response import BackfillCollectionResponse
-from airflow_client.client.models.backfill_post_body import BackfillPostBody
-from airflow_client.client.models.backfill_response import BackfillResponse
-from airflow_client.client.models.base_info_response import BaseInfoResponse
-from airflow_client.client.models.bulk_action_not_on_existence import BulkActionNotOnExistence
-from airflow_client.client.models.bulk_action_on_existence import BulkActionOnExistence
-from airflow_client.client.models.bulk_action_response import BulkActionResponse
-from airflow_client.client.models.bulk_body_bulk_task_instance_body import BulkBodyBulkTaskInstanceBody
-from airflow_client.client.models.bulk_body_bulk_task_instance_body_actions_inner import BulkBodyBulkTaskInstanceBodyActionsInner
-from airflow_client.client.models.bulk_body_connection_body import BulkBodyConnectionBody
-from airflow_client.client.models.bulk_body_connection_body_actions_inner import BulkBodyConnectionBodyActionsInner
-from airflow_client.client.models.bulk_body_pool_body import BulkBodyPoolBody
-from airflow_client.client.models.bulk_body_pool_body_actions_inner import BulkBodyPoolBodyActionsInner
-from airflow_client.client.models.bulk_body_variable_body import BulkBodyVariableBody
-from airflow_client.client.models.bulk_body_variable_body_actions_inner import BulkBodyVariableBodyActionsInner
-from airflow_client.client.models.bulk_create_action_bulk_task_instance_body import BulkCreateActionBulkTaskInstanceBody
-from airflow_client.client.models.bulk_create_action_connection_body import BulkCreateActionConnectionBody
-from airflow_client.client.models.bulk_create_action_pool_body import BulkCreateActionPoolBody
-from airflow_client.client.models.bulk_create_action_variable_body import BulkCreateActionVariableBody
-from airflow_client.client.models.bulk_delete_action_bulk_task_instance_body import BulkDeleteActionBulkTaskInstanceBody
-from airflow_client.client.models.bulk_delete_action_bulk_task_instance_body_entities_inner import BulkDeleteActionBulkTaskInstanceBodyEntitiesInner
-from airflow_client.client.models.bulk_delete_action_connection_body import BulkDeleteActionConnectionBody
-from airflow_client.client.models.bulk_delete_action_pool_body import BulkDeleteActionPoolBody
-from airflow_client.client.models.bulk_delete_action_variable_body import BulkDeleteActionVariableBody
-from airflow_client.client.models.bulk_response import BulkResponse
-from airflow_client.client.models.bulk_task_instance_body import BulkTaskInstanceBody
-from airflow_client.client.models.bulk_update_action_bulk_task_instance_body import BulkUpdateActionBulkTaskInstanceBody
-from airflow_client.client.models.bulk_update_action_connection_body import BulkUpdateActionConnectionBody
-from airflow_client.client.models.bulk_update_action_pool_body import BulkUpdateActionPoolBody
-from airflow_client.client.models.bulk_update_action_variable_body import BulkUpdateActionVariableBody
-from airflow_client.client.models.clear_task_instances_body import ClearTaskInstancesBody
-from airflow_client.client.models.clear_task_instances_body_task_ids_inner import ClearTaskInstancesBodyTaskIdsInner
-from airflow_client.client.models.config import Config
-from airflow_client.client.models.config_option import ConfigOption
-from airflow_client.client.models.config_section import ConfigSection
-from airflow_client.client.models.connection_body import ConnectionBody
-from airflow_client.client.models.connection_collection_response import ConnectionCollectionResponse
-from airflow_client.client.models.connection_response import ConnectionResponse
-from airflow_client.client.models.connection_test_response import ConnectionTestResponse
-from airflow_client.client.models.content import Content
-from airflow_client.client.models.create_asset_events_body import CreateAssetEventsBody
-from airflow_client.client.models.dag_collection_response import DAGCollectionResponse
-from airflow_client.client.models.dag_details_response import DAGDetailsResponse
-from airflow_client.client.models.dag_patch_body import DAGPatchBody
-from airflow_client.client.models.dag_response import DAGResponse
-from airflow_client.client.models.dag_run_clear_body import DAGRunClearBody
-from airflow_client.client.models.dag_run_collection_response import DAGRunCollectionResponse
-from airflow_client.client.models.dag_run_patch_body import DAGRunPatchBody
-from airflow_client.client.models.dag_run_patch_states import DAGRunPatchStates
-from airflow_client.client.models.dag_run_response import DAGRunResponse
-from airflow_client.client.models.dag_runs_batch_body import DAGRunsBatchBody
-from airflow_client.client.models.dag_source_response import DAGSourceResponse
-from airflow_client.client.models.dag_tag_collection_response import DAGTagCollectionResponse
-from airflow_client.client.models.dag_version_collection_response import DAGVersionCollectionResponse
-from airflow_client.client.models.dag_warning_collection_response import DAGWarningCollectionResponse
-from airflow_client.client.models.dag_warning_response import DAGWarningResponse
-from airflow_client.client.models.dag_processor_info_response import DagProcessorInfoResponse
-from airflow_client.client.models.dag_run_asset_reference import DagRunAssetReference
-from airflow_client.client.models.dag_run_state import DagRunState
-from airflow_client.client.models.dag_run_triggered_by_type import DagRunTriggeredByType
-from airflow_client.client.models.dag_run_type import DagRunType
-from airflow_client.client.models.dag_schedule_asset_reference import DagScheduleAssetReference
-from airflow_client.client.models.dag_stats_collection_response import DagStatsCollectionResponse
-from airflow_client.client.models.dag_stats_response import DagStatsResponse
-from airflow_client.client.models.dag_stats_state_response import DagStatsStateResponse
-from airflow_client.client.models.dag_tag_response import DagTagResponse
-from airflow_client.client.models.dag_version_response import DagVersionResponse
-from airflow_client.client.models.dag_warning_type import DagWarningType
-from airflow_client.client.models.detail import Detail
-from airflow_client.client.models.dry_run_backfill_collection_response import DryRunBackfillCollectionResponse
-from airflow_client.client.models.dry_run_backfill_response import DryRunBackfillResponse
-from airflow_client.client.models.event_log_collection_response import EventLogCollectionResponse
-from airflow_client.client.models.event_log_response import EventLogResponse
-from airflow_client.client.models.external_log_url_response import ExternalLogUrlResponse
-from airflow_client.client.models.external_view_response import ExternalViewResponse
-from airflow_client.client.models.extra_link_collection_response import ExtraLinkCollectionResponse
-from airflow_client.client.models.fast_api_app_response import FastAPIAppResponse
-from airflow_client.client.models.fast_api_root_middleware_response import FastAPIRootMiddlewareResponse
-from airflow_client.client.models.hitl_detail import HITLDetail
-from airflow_client.client.models.hitl_detail_collection import HITLDetailCollection
-from airflow_client.client.models.hitl_detail_response import HITLDetailResponse
-from airflow_client.client.models.hitl_user import HITLUser
-from airflow_client.client.models.http_exception_response import HTTPExceptionResponse
-from airflow_client.client.models.http_validation_error import HTTPValidationError
-from airflow_client.client.models.health_info_response import HealthInfoResponse
-from airflow_client.client.models.import_error_collection_response import ImportErrorCollectionResponse
-from airflow_client.client.models.import_error_response import ImportErrorResponse
-from airflow_client.client.models.job_collection_response import JobCollectionResponse
-from airflow_client.client.models.job_response import JobResponse
-from airflow_client.client.models.last_asset_event_response import LastAssetEventResponse
-from airflow_client.client.models.patch_task_instance_body import PatchTaskInstanceBody
-from airflow_client.client.models.plugin_collection_response import PluginCollectionResponse
-from airflow_client.client.models.plugin_import_error_collection_response import PluginImportErrorCollectionResponse
-from airflow_client.client.models.plugin_import_error_response import PluginImportErrorResponse
-from airflow_client.client.models.plugin_response import PluginResponse
-from airflow_client.client.models.pool_body import PoolBody
-from airflow_client.client.models.pool_collection_response import PoolCollectionResponse
-from airflow_client.client.models.pool_patch_body import PoolPatchBody
-from airflow_client.client.models.pool_response import PoolResponse
-from airflow_client.client.models.provider_collection_response import ProviderCollectionResponse
-from airflow_client.client.models.provider_response import ProviderResponse
-from airflow_client.client.models.queued_event_collection_response import QueuedEventCollectionResponse
-from airflow_client.client.models.queued_event_response import QueuedEventResponse
-from airflow_client.client.models.react_app_response import ReactAppResponse
-from airflow_client.client.models.reprocess_behavior import ReprocessBehavior
-from airflow_client.client.models.response_clear_dag_run import ResponseClearDagRun
-from airflow_client.client.models.response_get_xcom_entry import ResponseGetXcomEntry
-from airflow_client.client.models.scheduler_info_response import SchedulerInfoResponse
-from airflow_client.client.models.structured_log_message import StructuredLogMessage
-from airflow_client.client.models.task_collection_response import TaskCollectionResponse
-from airflow_client.client.models.task_dependency_collection_response import TaskDependencyCollectionResponse
-from airflow_client.client.models.task_dependency_response import TaskDependencyResponse
-from airflow_client.client.models.task_inlet_asset_reference import TaskInletAssetReference
-from airflow_client.client.models.task_instance_collection_response import TaskInstanceCollectionResponse
-from airflow_client.client.models.task_instance_history_collection_response import TaskInstanceHistoryCollectionResponse
-from airflow_client.client.models.task_instance_history_response import TaskInstanceHistoryResponse
-from airflow_client.client.models.task_instance_response import TaskInstanceResponse
-from airflow_client.client.models.task_instance_state import TaskInstanceState
-from airflow_client.client.models.task_instances_batch_body import TaskInstancesBatchBody
-from airflow_client.client.models.task_instances_log_response import TaskInstancesLogResponse
-from airflow_client.client.models.task_outlet_asset_reference import TaskOutletAssetReference
-from airflow_client.client.models.task_response import TaskResponse
-from airflow_client.client.models.time_delta import TimeDelta
-from airflow_client.client.models.trigger_dag_run_post_body import TriggerDAGRunPostBody
-from airflow_client.client.models.trigger_response import TriggerResponse
-from airflow_client.client.models.triggerer_info_response import TriggererInfoResponse
-from airflow_client.client.models.update_hitl_detail_payload import UpdateHITLDetailPayload
-from airflow_client.client.models.validation_error import ValidationError
-from airflow_client.client.models.validation_error_loc_inner import ValidationErrorLocInner
-from airflow_client.client.models.value import Value
-from airflow_client.client.models.variable_body import VariableBody
-from airflow_client.client.models.variable_collection_response import VariableCollectionResponse
-from airflow_client.client.models.variable_response import VariableResponse
-from airflow_client.client.models.version_info import VersionInfo
-from airflow_client.client.models.x_com_collection_response import XComCollectionResponse
-from airflow_client.client.models.x_com_create_body import XComCreateBody
-from airflow_client.client.models.x_com_response import XComResponse
-from airflow_client.client.models.x_com_response_native import XComResponseNative
-from airflow_client.client.models.x_com_response_string import XComResponseString
-from airflow_client.client.models.x_com_update_body import XComUpdateBody
+from airflow_client.client.models.app_builder_menu_item_response import AppBuilderMenuItemResponse as AppBuilderMenuItemResponse
+from airflow_client.client.models.app_builder_view_response import AppBuilderViewResponse as AppBuilderViewResponse
+from airflow_client.client.models.asset_alias_collection_response import AssetAliasCollectionResponse as AssetAliasCollectionResponse
+from airflow_client.client.models.asset_alias_response import AssetAliasResponse as AssetAliasResponse
+from airflow_client.client.models.asset_collection_response import AssetCollectionResponse as AssetCollectionResponse
+from airflow_client.client.models.asset_event_collection_response import AssetEventCollectionResponse as AssetEventCollectionResponse
+from airflow_client.client.models.asset_event_response import AssetEventResponse as AssetEventResponse
+from airflow_client.client.models.asset_response import AssetResponse as AssetResponse
+from airflow_client.client.models.backfill_collection_response import BackfillCollectionResponse as BackfillCollectionResponse
+from airflow_client.client.models.backfill_post_body import BackfillPostBody as BackfillPostBody
+from airflow_client.client.models.backfill_response import BackfillResponse as BackfillResponse
+from airflow_client.client.models.base_info_response import BaseInfoResponse as BaseInfoResponse
+from airflow_client.client.models.bulk_action_not_on_existence import BulkActionNotOnExistence as BulkActionNotOnExistence
+from airflow_client.client.models.bulk_action_on_existence import BulkActionOnExistence as BulkActionOnExistence
+from airflow_client.client.models.bulk_action_response import BulkActionResponse as BulkActionResponse
+from airflow_client.client.models.bulk_body_bulk_task_instance_body import BulkBodyBulkTaskInstanceBody as BulkBodyBulkTaskInstanceBody
+from airflow_client.client.models.bulk_body_bulk_task_instance_body_actions_inner import BulkBodyBulkTaskInstanceBodyActionsInner as BulkBodyBulkTaskInstanceBodyActionsInner
+from airflow_client.client.models.bulk_body_connection_body import BulkBodyConnectionBody as BulkBodyConnectionBody
+from airflow_client.client.models.bulk_body_connection_body_actions_inner import BulkBodyConnectionBodyActionsInner as BulkBodyConnectionBodyActionsInner
+from airflow_client.client.models.bulk_body_pool_body import BulkBodyPoolBody as BulkBodyPoolBody
+from airflow_client.client.models.bulk_body_pool_body_actions_inner import BulkBodyPoolBodyActionsInner as BulkBodyPoolBodyActionsInner
+from airflow_client.client.models.bulk_body_variable_body import BulkBodyVariableBody as BulkBodyVariableBody
+from airflow_client.client.models.bulk_body_variable_body_actions_inner import BulkBodyVariableBodyActionsInner as BulkBodyVariableBodyActionsInner
+from airflow_client.client.models.bulk_create_action_bulk_task_instance_body import BulkCreateActionBulkTaskInstanceBody as BulkCreateActionBulkTaskInstanceBody
+from airflow_client.client.models.bulk_create_action_connection_body import BulkCreateActionConnectionBody as BulkCreateActionConnectionBody
+from airflow_client.client.models.bulk_create_action_pool_body import BulkCreateActionPoolBody as BulkCreateActionPoolBody
+from airflow_client.client.models.bulk_create_action_variable_body import BulkCreateActionVariableBody as BulkCreateActionVariableBody
+from airflow_client.client.models.bulk_delete_action_bulk_task_instance_body import BulkDeleteActionBulkTaskInstanceBody as BulkDeleteActionBulkTaskInstanceBody
+from airflow_client.client.models.bulk_delete_action_bulk_task_instance_body_entities_inner import BulkDeleteActionBulkTaskInstanceBodyEntitiesInner as BulkDeleteActionBulkTaskInstanceBodyEntitiesInner
+from airflow_client.client.models.bulk_delete_action_connection_body import BulkDeleteActionConnectionBody as BulkDeleteActionConnectionBody
+from airflow_client.client.models.bulk_delete_action_pool_body import BulkDeleteActionPoolBody as BulkDeleteActionPoolBody
+from airflow_client.client.models.bulk_delete_action_variable_body import BulkDeleteActionVariableBody as BulkDeleteActionVariableBody
+from airflow_client.client.models.bulk_response import BulkResponse as BulkResponse
+from airflow_client.client.models.bulk_task_instance_body import BulkTaskInstanceBody as BulkTaskInstanceBody
+from airflow_client.client.models.bulk_update_action_bulk_task_instance_body import BulkUpdateActionBulkTaskInstanceBody as BulkUpdateActionBulkTaskInstanceBody
+from airflow_client.client.models.bulk_update_action_connection_body import BulkUpdateActionConnectionBody as BulkUpdateActionConnectionBody
+from airflow_client.client.models.bulk_update_action_pool_body import BulkUpdateActionPoolBody as BulkUpdateActionPoolBody
+from airflow_client.client.models.bulk_update_action_variable_body import BulkUpdateActionVariableBody as BulkUpdateActionVariableBody
+from airflow_client.client.models.clear_task_instances_body import ClearTaskInstancesBody as ClearTaskInstancesBody
+from airflow_client.client.models.clear_task_instances_body_task_ids_inner import ClearTaskInstancesBodyTaskIdsInner as ClearTaskInstancesBodyTaskIdsInner
+from airflow_client.client.models.config import Config as Config
+from airflow_client.client.models.config_option import ConfigOption as ConfigOption
+from airflow_client.client.models.config_section import ConfigSection as ConfigSection
+from airflow_client.client.models.connection_body import ConnectionBody as ConnectionBody
+from airflow_client.client.models.connection_collection_response import ConnectionCollectionResponse as ConnectionCollectionResponse
+from airflow_client.client.models.connection_response import ConnectionResponse as ConnectionResponse
+from airflow_client.client.models.connection_test_response import ConnectionTestResponse as ConnectionTestResponse
+from airflow_client.client.models.content import Content as Content
+from airflow_client.client.models.create_asset_events_body import CreateAssetEventsBody as CreateAssetEventsBody
+from airflow_client.client.models.dag_collection_response import DAGCollectionResponse as DAGCollectionResponse
+from airflow_client.client.models.dag_details_response import DAGDetailsResponse as DAGDetailsResponse
+from airflow_client.client.models.dag_patch_body import DAGPatchBody as DAGPatchBody
+from airflow_client.client.models.dag_response import DAGResponse as DAGResponse
+from airflow_client.client.models.dag_run_clear_body import DAGRunClearBody as DAGRunClearBody
+from airflow_client.client.models.dag_run_collection_response import DAGRunCollectionResponse as DAGRunCollectionResponse
+from airflow_client.client.models.dag_run_patch_body import DAGRunPatchBody as DAGRunPatchBody
+from airflow_client.client.models.dag_run_patch_states import DAGRunPatchStates as DAGRunPatchStates
+from airflow_client.client.models.dag_run_response import DAGRunResponse as DAGRunResponse
+from airflow_client.client.models.dag_runs_batch_body import DAGRunsBatchBody as DAGRunsBatchBody
+from airflow_client.client.models.dag_source_response import DAGSourceResponse as DAGSourceResponse
+from airflow_client.client.models.dag_tag_collection_response import DAGTagCollectionResponse as DAGTagCollectionResponse
+from airflow_client.client.models.dag_version_collection_response import DAGVersionCollectionResponse as DAGVersionCollectionResponse
+from airflow_client.client.models.dag_warning_collection_response import DAGWarningCollectionResponse as DAGWarningCollectionResponse
+from airflow_client.client.models.dag_warning_response import DAGWarningResponse as DAGWarningResponse
+from airflow_client.client.models.dag_processor_info_response import DagProcessorInfoResponse as DagProcessorInfoResponse
+from airflow_client.client.models.dag_run_asset_reference import DagRunAssetReference as DagRunAssetReference
+from airflow_client.client.models.dag_run_state import DagRunState as DagRunState
+from airflow_client.client.models.dag_run_triggered_by_type import DagRunTriggeredByType as DagRunTriggeredByType
+from airflow_client.client.models.dag_run_type import DagRunType as DagRunType
+from airflow_client.client.models.dag_schedule_asset_reference import DagScheduleAssetReference as DagScheduleAssetReference
+from airflow_client.client.models.dag_stats_collection_response import DagStatsCollectionResponse as DagStatsCollectionResponse
+from airflow_client.client.models.dag_stats_response import DagStatsResponse as DagStatsResponse
+from airflow_client.client.models.dag_stats_state_response import DagStatsStateResponse as DagStatsStateResponse
+from airflow_client.client.models.dag_tag_response import DagTagResponse as DagTagResponse
+from airflow_client.client.models.dag_version_response import DagVersionResponse as DagVersionResponse
+from airflow_client.client.models.dag_warning_type import DagWarningType as DagWarningType
+from airflow_client.client.models.detail import Detail as Detail
+from airflow_client.client.models.dry_run_backfill_collection_response import DryRunBackfillCollectionResponse as DryRunBackfillCollectionResponse
+from airflow_client.client.models.dry_run_backfill_response import DryRunBackfillResponse as DryRunBackfillResponse
+from airflow_client.client.models.event_log_collection_response import EventLogCollectionResponse as EventLogCollectionResponse
+from airflow_client.client.models.event_log_response import EventLogResponse as EventLogResponse
+from airflow_client.client.models.external_log_url_response import ExternalLogUrlResponse as ExternalLogUrlResponse
+from airflow_client.client.models.external_view_response import ExternalViewResponse as ExternalViewResponse
+from airflow_client.client.models.extra_link_collection_response import ExtraLinkCollectionResponse as ExtraLinkCollectionResponse
+from airflow_client.client.models.fast_api_app_response import FastAPIAppResponse as FastAPIAppResponse
+from airflow_client.client.models.fast_api_root_middleware_response import FastAPIRootMiddlewareResponse as FastAPIRootMiddlewareResponse
+from airflow_client.client.models.hitl_detail import HITLDetail as HITLDetail
+from airflow_client.client.models.hitl_detail_collection import HITLDetailCollection as HITLDetailCollection
+from airflow_client.client.models.hitl_detail_response import HITLDetailResponse as HITLDetailResponse
+from airflow_client.client.models.hitl_user import HITLUser as HITLUser
+from airflow_client.client.models.http_exception_response import HTTPExceptionResponse as HTTPExceptionResponse
+from airflow_client.client.models.http_validation_error import HTTPValidationError as HTTPValidationError
+from airflow_client.client.models.health_info_response import HealthInfoResponse as HealthInfoResponse
+from airflow_client.client.models.import_error_collection_response import ImportErrorCollectionResponse as ImportErrorCollectionResponse
+from airflow_client.client.models.import_error_response import ImportErrorResponse as ImportErrorResponse
+from airflow_client.client.models.job_collection_response import JobCollectionResponse as JobCollectionResponse
+from airflow_client.client.models.job_response import JobResponse as JobResponse
+from airflow_client.client.models.last_asset_event_response import LastAssetEventResponse as LastAssetEventResponse
+from airflow_client.client.models.patch_task_instance_body import PatchTaskInstanceBody as PatchTaskInstanceBody
+from airflow_client.client.models.plugin_collection_response import PluginCollectionResponse as PluginCollectionResponse
+from airflow_client.client.models.plugin_import_error_collection_response import PluginImportErrorCollectionResponse as PluginImportErrorCollectionResponse
+from airflow_client.client.models.plugin_import_error_response import PluginImportErrorResponse as PluginImportErrorResponse
+from airflow_client.client.models.plugin_response import PluginResponse as PluginResponse
+from airflow_client.client.models.pool_body import PoolBody as PoolBody
+from airflow_client.client.models.pool_collection_response import PoolCollectionResponse as PoolCollectionResponse
+from airflow_client.client.models.pool_patch_body import PoolPatchBody as PoolPatchBody
+from airflow_client.client.models.pool_response import PoolResponse as PoolResponse
+from airflow_client.client.models.provider_collection_response import ProviderCollectionResponse as ProviderCollectionResponse
+from airflow_client.client.models.provider_response import ProviderResponse as ProviderResponse
+from airflow_client.client.models.queued_event_collection_response import QueuedEventCollectionResponse as QueuedEventCollectionResponse
+from airflow_client.client.models.queued_event_response import QueuedEventResponse as QueuedEventResponse
+from airflow_client.client.models.react_app_response import ReactAppResponse as ReactAppResponse
+from airflow_client.client.models.reprocess_behavior import ReprocessBehavior as ReprocessBehavior
+from airflow_client.client.models.response_clear_dag_run import ResponseClearDagRun as ResponseClearDagRun
+from airflow_client.client.models.response_get_xcom_entry import ResponseGetXcomEntry as ResponseGetXcomEntry
+from airflow_client.client.models.scheduler_info_response import SchedulerInfoResponse as SchedulerInfoResponse
+from airflow_client.client.models.structured_log_message import StructuredLogMessage as StructuredLogMessage
+from airflow_client.client.models.task_collection_response import TaskCollectionResponse as TaskCollectionResponse
+from airflow_client.client.models.task_dependency_collection_response import TaskDependencyCollectionResponse as TaskDependencyCollectionResponse
+from airflow_client.client.models.task_dependency_response import TaskDependencyResponse as TaskDependencyResponse
+from airflow_client.client.models.task_inlet_asset_reference import TaskInletAssetReference as TaskInletAssetReference
+from airflow_client.client.models.task_instance_collection_response import TaskInstanceCollectionResponse as TaskInstanceCollectionResponse
+from airflow_client.client.models.task_instance_history_collection_response import TaskInstanceHistoryCollectionResponse as TaskInstanceHistoryCollectionResponse
+from airflow_client.client.models.task_instance_history_response import TaskInstanceHistoryResponse as TaskInstanceHistoryResponse
+from airflow_client.client.models.task_instance_response import TaskInstanceResponse as TaskInstanceResponse
+from airflow_client.client.models.task_instance_state import TaskInstanceState as TaskInstanceState
+from airflow_client.client.models.task_instances_batch_body import TaskInstancesBatchBody as TaskInstancesBatchBody
+from airflow_client.client.models.task_instances_log_response import TaskInstancesLogResponse as TaskInstancesLogResponse
+from airflow_client.client.models.task_outlet_asset_reference import TaskOutletAssetReference as TaskOutletAssetReference
+from airflow_client.client.models.task_response import TaskResponse as TaskResponse
+from airflow_client.client.models.time_delta import TimeDelta as TimeDelta
+from airflow_client.client.models.trigger_dag_run_post_body import TriggerDAGRunPostBody as TriggerDAGRunPostBody
+from airflow_client.client.models.trigger_response import TriggerResponse as TriggerResponse
+from airflow_client.client.models.triggerer_info_response import TriggererInfoResponse as TriggererInfoResponse
+from airflow_client.client.models.update_hitl_detail_payload import UpdateHITLDetailPayload as UpdateHITLDetailPayload
+from airflow_client.client.models.validation_error import ValidationError as ValidationError
+from airflow_client.client.models.validation_error_loc_inner import ValidationErrorLocInner as ValidationErrorLocInner
+from airflow_client.client.models.value import Value as Value
+from airflow_client.client.models.variable_body import VariableBody as VariableBody
+from airflow_client.client.models.variable_collection_response import VariableCollectionResponse as VariableCollectionResponse
+from airflow_client.client.models.variable_response import VariableResponse as VariableResponse
+from airflow_client.client.models.version_info import VersionInfo as VersionInfo
+from airflow_client.client.models.x_com_collection_response import XComCollectionResponse as XComCollectionResponse
+from airflow_client.client.models.x_com_create_body import XComCreateBody as XComCreateBody
+from airflow_client.client.models.x_com_response import XComResponse as XComResponse
+from airflow_client.client.models.x_com_response_native import XComResponseNative as XComResponseNative
+from airflow_client.client.models.x_com_response_string import XComResponseString as XComResponseString
+from airflow_client.client.models.x_com_update_body import XComUpdateBody as XComUpdateBody
+
diff --git a/airflow_client/client/api/dag_api.py b/airflow_client/client/api/dag_api.py
index b3df75a..46b7a32 100644
--- a/airflow_client/client/api/dag_api.py
+++ b/airflow_client/client/api/dag_api.py
@@ -25,7 +25,6 @@
 from airflow_client.client.models.dag_patch_body import DAGPatchBody
 from airflow_client.client.models.dag_response import DAGResponse
 from airflow_client.client.models.dag_tag_collection_response import DAGTagCollectionResponse
-from airflow_client.client.models.dag_run_state import DagRunState
 
 from airflow_client.client.api_client import ApiClient, RequestSerialized
 from airflow_client.client.api_response import ApiResponse
@@ -1488,7 +1487,7 @@
         exclude_stale: Optional[StrictBool] = None,
         paused: Optional[StrictBool] = None,
         has_import_errors: Annotated[Optional[StrictBool], Field(description="Filter Dags by having import errors. Only Dags that have been successfully loaded before will be returned.")] = None,
-        last_dag_run_state: Optional[DagRunState] = None,
+        last_dag_run_state: Optional[Any] = None,
         bundle_name: Optional[StrictStr] = None,
         bundle_version: Optional[StrictStr] = None,
         has_asset_schedule: Annotated[Optional[StrictBool], Field(description="Filter Dags with asset-based scheduling")] = None,
@@ -1658,7 +1657,7 @@
         exclude_stale: Optional[StrictBool] = None,
         paused: Optional[StrictBool] = None,
         has_import_errors: Annotated[Optional[StrictBool], Field(description="Filter Dags by having import errors. Only Dags that have been successfully loaded before will be returned.")] = None,
-        last_dag_run_state: Optional[DagRunState] = None,
+        last_dag_run_state: Optional[Any] = None,
         bundle_name: Optional[StrictStr] = None,
         bundle_version: Optional[StrictStr] = None,
         has_asset_schedule: Annotated[Optional[StrictBool], Field(description="Filter Dags with asset-based scheduling")] = None,
@@ -1828,7 +1827,7 @@
         exclude_stale: Optional[StrictBool] = None,
         paused: Optional[StrictBool] = None,
         has_import_errors: Annotated[Optional[StrictBool], Field(description="Filter Dags by having import errors. Only Dags that have been successfully loaded before will be returned.")] = None,
-        last_dag_run_state: Optional[DagRunState] = None,
+        last_dag_run_state: Optional[Any] = None,
         bundle_name: Optional[StrictStr] = None,
         bundle_version: Optional[StrictStr] = None,
         has_asset_schedule: Annotated[Optional[StrictBool], Field(description="Filter Dags with asset-based scheduling")] = None,
diff --git a/airflow_client/client/api/dag_warning_api.py b/airflow_client/client/api/dag_warning_api.py
index 8fedc8e..2b0a7d4 100644
--- a/airflow_client/client/api/dag_warning_api.py
+++ b/airflow_client/client/api/dag_warning_api.py
@@ -17,10 +17,9 @@
 from typing_extensions import Annotated
 
 from pydantic import Field, StrictStr
-from typing import List, Optional
+from typing import Any, List, Optional
 from typing_extensions import Annotated
 from airflow_client.client.models.dag_warning_collection_response import DAGWarningCollectionResponse
-from airflow_client.client.models.dag_warning_type import DagWarningType
 
 from airflow_client.client.api_client import ApiClient, RequestSerialized
 from airflow_client.client.api_response import ApiResponse
@@ -44,7 +43,7 @@
     def list_dag_warnings(
         self,
         dag_id: Optional[StrictStr] = None,
-        warning_type: Optional[DagWarningType] = None,
+        warning_type: Optional[Any] = None,
         limit: Optional[Annotated[int, Field(strict=True, ge=0)]] = None,
         offset: Optional[Annotated[int, Field(strict=True, ge=0)]] = None,
         order_by: Annotated[Optional[List[StrictStr]], Field(description="Attributes to order by, multi criteria sort is supported. Prefix with `-` for descending order. Supported attributes: `dag_id, warning_type, message, timestamp`")] = None,
@@ -130,7 +129,7 @@
     def list_dag_warnings_with_http_info(
         self,
         dag_id: Optional[StrictStr] = None,
-        warning_type: Optional[DagWarningType] = None,
+        warning_type: Optional[Any] = None,
         limit: Optional[Annotated[int, Field(strict=True, ge=0)]] = None,
         offset: Optional[Annotated[int, Field(strict=True, ge=0)]] = None,
         order_by: Annotated[Optional[List[StrictStr]], Field(description="Attributes to order by, multi criteria sort is supported. Prefix with `-` for descending order. Supported attributes: `dag_id, warning_type, message, timestamp`")] = None,
@@ -216,7 +215,7 @@
     def list_dag_warnings_without_preload_content(
         self,
         dag_id: Optional[StrictStr] = None,
-        warning_type: Optional[DagWarningType] = None,
+        warning_type: Optional[Any] = None,
         limit: Optional[Annotated[int, Field(strict=True, ge=0)]] = None,
         offset: Optional[Annotated[int, Field(strict=True, ge=0)]] = None,
         order_by: Annotated[Optional[List[StrictStr]], Field(description="Attributes to order by, multi criteria sort is supported. Prefix with `-` for descending order. Supported attributes: `dag_id, warning_type, message, timestamp`")] = None,
diff --git a/airflow_client/client/api/task_instance_api.py b/airflow_client/client/api/task_instance_api.py
index 2557e26..1d87eb2 100644
--- a/airflow_client/client/api/task_instance_api.py
+++ b/airflow_client/client/api/task_instance_api.py
@@ -6357,6 +6357,7 @@
         duration_lte: Optional[Union[StrictFloat, StrictInt]] = None,
         duration_lt: Optional[Union[StrictFloat, StrictInt]] = None,
         task_display_name_pattern: Annotated[Optional[StrictStr], Field(description="SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.")] = None,
+        task_group_id: Annotated[Optional[StrictStr], Field(description="Filter by exact task group ID. Returns all tasks within the specified task group.")] = None,
         state: Optional[List[StrictStr]] = None,
         pool: Optional[List[StrictStr]] = None,
         queue: Optional[List[StrictStr]] = None,
@@ -6441,6 +6442,8 @@
         :type duration_lt: float
         :param task_display_name_pattern: SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
         :type task_display_name_pattern: str
+        :param task_group_id: Filter by exact task group ID. Returns all tasks within the specified task group.
+        :type task_group_id: str
         :param state:
         :type state: List[str]
         :param pool:
@@ -6514,6 +6517,7 @@
             duration_lte=duration_lte,
             duration_lt=duration_lt,
             task_display_name_pattern=task_display_name_pattern,
+            task_group_id=task_group_id,
             state=state,
             pool=pool,
             queue=queue,
@@ -6580,6 +6584,7 @@
         duration_lte: Optional[Union[StrictFloat, StrictInt]] = None,
         duration_lt: Optional[Union[StrictFloat, StrictInt]] = None,
         task_display_name_pattern: Annotated[Optional[StrictStr], Field(description="SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.")] = None,
+        task_group_id: Annotated[Optional[StrictStr], Field(description="Filter by exact task group ID. Returns all tasks within the specified task group.")] = None,
         state: Optional[List[StrictStr]] = None,
         pool: Optional[List[StrictStr]] = None,
         queue: Optional[List[StrictStr]] = None,
@@ -6664,6 +6669,8 @@
         :type duration_lt: float
         :param task_display_name_pattern: SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
         :type task_display_name_pattern: str
+        :param task_group_id: Filter by exact task group ID. Returns all tasks within the specified task group.
+        :type task_group_id: str
         :param state:
         :type state: List[str]
         :param pool:
@@ -6737,6 +6744,7 @@
             duration_lte=duration_lte,
             duration_lt=duration_lt,
             task_display_name_pattern=task_display_name_pattern,
+            task_group_id=task_group_id,
             state=state,
             pool=pool,
             queue=queue,
@@ -6803,6 +6811,7 @@
         duration_lte: Optional[Union[StrictFloat, StrictInt]] = None,
         duration_lt: Optional[Union[StrictFloat, StrictInt]] = None,
         task_display_name_pattern: Annotated[Optional[StrictStr], Field(description="SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.")] = None,
+        task_group_id: Annotated[Optional[StrictStr], Field(description="Filter by exact task group ID. Returns all tasks within the specified task group.")] = None,
         state: Optional[List[StrictStr]] = None,
         pool: Optional[List[StrictStr]] = None,
         queue: Optional[List[StrictStr]] = None,
@@ -6887,6 +6896,8 @@
         :type duration_lt: float
         :param task_display_name_pattern: SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported.
         :type task_display_name_pattern: str
+        :param task_group_id: Filter by exact task group ID. Returns all tasks within the specified task group.
+        :type task_group_id: str
         :param state:
         :type state: List[str]
         :param pool:
@@ -6960,6 +6971,7 @@
             duration_lte=duration_lte,
             duration_lt=duration_lt,
             task_display_name_pattern=task_display_name_pattern,
+            task_group_id=task_group_id,
             state=state,
             pool=pool,
             queue=queue,
@@ -7021,6 +7033,7 @@
         duration_lte,
         duration_lt,
         task_display_name_pattern,
+        task_group_id,
         state,
         pool,
         queue,
@@ -7351,6 +7364,10 @@
             
             _query_params.append(('task_display_name_pattern', task_display_name_pattern))
             
+        if task_group_id is not None:
+            
+            _query_params.append(('task_group_id', task_group_id))
+            
         if state is not None:
             
             _query_params.append(('state', state))
diff --git a/airflow_client/client/api_client.py b/airflow_client/client/api_client.py
index a6404fb..de136cc 100644
--- a/airflow_client/client/api_client.py
+++ b/airflow_client/client/api_client.py
@@ -21,6 +21,7 @@
 import os
 import re
 import tempfile
+import uuid
 
 from urllib.parse import quote
 from typing import Tuple, Optional, List, Dict, Union
@@ -90,7 +91,7 @@
             self.default_headers[header_name] = header_value
         self.cookie = cookie
         # Set default User-Agent.
-        self.user_agent = 'OpenAPI-Generator/3.1.4/python'
+        self.user_agent = 'OpenAPI-Generator/3.1.6/python'
         self.client_side_validation = configuration.client_side_validation
 
     def __enter__(self):
@@ -311,7 +312,7 @@
                 return_data = self.__deserialize_file(response_data)
             elif response_type is not None:
                 match = None
-                content_type = response_data.getheader('content-type')
+                content_type = response_data.headers.get('content-type')
                 if content_type is not None:
                     match = re.search(r"charset=([a-zA-Z\-\d]+)[\s;]?", content_type)
                 encoding = match.group(1) if match else "utf-8"
@@ -328,7 +329,7 @@
         return ApiResponse(
             status_code = response_data.status,
             data = return_data,
-            headers = response_data.getheaders(),
+            headers = response_data.headers,
             raw_data = response_data.data
         )
 
@@ -356,6 +357,8 @@
             return obj.get_secret_value()
         elif isinstance(obj, self.PRIMITIVE_TYPES):
             return obj
+        elif isinstance(obj, uuid.UUID):
+            return str(obj)
         elif isinstance(obj, list):
             return [
                 self.sanitize_for_serialization(sub_obj) for sub_obj in obj
@@ -382,6 +385,10 @@
             else:
                 obj_dict = obj.__dict__
 
+        if isinstance(obj_dict, list):
+            # here we handle instances that can either be a list or something else, and only became a real list by calling to_dict()
+            return self.sanitize_for_serialization(obj_dict)
+
         return {
             key: self.sanitize_for_serialization(val)
             for key, val in obj_dict.items()
@@ -404,7 +411,7 @@
                 data = json.loads(response_text)
             except ValueError:
                 data = response_text
-        elif re.match(r'^application/(json|[\w!#$&.+-^_]+\+json)\s*(;|$)', content_type, re.IGNORECASE):
+        elif re.match(r'^application/(json|[\w!#$&.+\-^_]+\+json)\s*(;|$)', content_type, re.IGNORECASE):
             if response_text == "":
                 data = ""
             else:
@@ -453,13 +460,13 @@
 
         if klass in self.PRIMITIVE_TYPES:
             return self.__deserialize_primitive(data, klass)
-        elif klass == object:
+        elif klass is object:
             return self.__deserialize_object(data)
-        elif klass == datetime.date:
+        elif klass is datetime.date:
             return self.__deserialize_date(data)
-        elif klass == datetime.datetime:
+        elif klass is datetime.datetime:
             return self.__deserialize_datetime(data)
-        elif klass == decimal.Decimal:
+        elif klass is decimal.Decimal:
             return decimal.Decimal(data)
         elif issubclass(klass, Enum):
             return self.__deserialize_enum(data, klass)
@@ -694,7 +701,7 @@
         os.close(fd)
         os.remove(path)
 
-        content_disposition = response.getheader("Content-Disposition")
+        content_disposition = response.headers.get("Content-Disposition")
         if content_disposition:
             m = re.search(
                 r'filename=[\'"]?([^\'"\s]+)[\'"]?',
diff --git a/airflow_client/client/configuration.py b/airflow_client/client/configuration.py
index c815c17..13c9ae3 100644
--- a/airflow_client/client/configuration.py
+++ b/airflow_client/client/configuration.py
@@ -164,6 +164,8 @@
     :param retries: Number of retries for API requests.
     :param ca_cert_data: verify the peer using concatenated CA certificate data
       in PEM (str) or DER (bytes) format.
+    :param cert_file: the path to a client certificate file, for mTLS.
+    :param key_file: the path to a client key file, for mTLS.
 
     :Example:
     """
@@ -186,6 +188,8 @@
         ssl_ca_cert: Optional[str]=None,
         retries: Optional[int] = None,
         ca_cert_data: Optional[Union[str, bytes]] = None,
+        cert_file: Optional[str]=None,
+        key_file: Optional[str]=None,
         *,
         debug: Optional[bool] = None,
     ) -> None:
@@ -267,10 +271,10 @@
         """Set this to verify the peer using PEM (str) or DER (bytes)
            certificate data.
         """
-        self.cert_file = None
+        self.cert_file = cert_file
         """client certificate file
         """
-        self.key_file = None
+        self.key_file = key_file
         """client key file
         """
         self.assert_hostname = None
@@ -483,6 +487,7 @@
         password = ""
         if self.password is not None:
             password = self.password
+
         return urllib3.util.make_headers(
             basic_auth=username + ':' + password
         ).get('authorization')
@@ -518,7 +523,7 @@
                "OS: {env}\n"\
                "Python Version: {pyversion}\n"\
                "Version of the API: 2\n"\
-               "SDK Package Version: 3.1.4".\
+               "SDK Package Version: 3.1.6".\
                format(env=sys.platform, pyversion=sys.version)
 
     def get_host_settings(self) -> List[HostSetting]:
@@ -566,6 +571,7 @@
                 variable_name, variable['default_value'])
 
             if 'enum_values' in variable \
+                    and variable['enum_values'] \
                     and used_value not in variable['enum_values']:
                 raise ValueError(
                     "The variable `{0}` in the host URL has invalid value "
diff --git a/airflow_client/client/exceptions.py b/airflow_client/client/exceptions.py
index 6ceeca4..00bb2d3 100644
--- a/airflow_client/client/exceptions.py
+++ b/airflow_client/client/exceptions.py
@@ -128,7 +128,7 @@
                     self.body = http_resp.data.decode('utf-8')
                 except Exception:
                     pass
-            self.headers = http_resp.getheaders()
+            self.headers = http_resp.headers
 
     @classmethod
     def from_response(
@@ -169,8 +169,11 @@
             error_message += "HTTP response headers: {0}\n".format(
                 self.headers)
 
-        if self.data or self.body:
-            error_message += "HTTP response body: {0}\n".format(self.data or self.body)
+        if self.body:
+            error_message += "HTTP response body: {0}\n".format(self.body)
+
+        if self.data:
+            error_message += "HTTP response data: {0}\n".format(self.data)
 
         return error_message
 
diff --git a/airflow_client/client/models/__init__.py b/airflow_client/client/models/__init__.py
index c4ce5a7..c3b6d10 100644
--- a/airflow_client/client/models/__init__.py
+++ b/airflow_client/client/models/__init__.py
@@ -12,7 +12,6 @@
     Do not edit the class manually.
 """  # noqa: E501
 
-
 # import models into model package
 from airflow_client.client.models.app_builder_menu_item_response import AppBuilderMenuItemResponse
 from airflow_client.client.models.app_builder_view_response import AppBuilderViewResponse
@@ -162,3 +161,4 @@
 from airflow_client.client.models.x_com_response_native import XComResponseNative
 from airflow_client.client.models.x_com_response_string import XComResponseString
 from airflow_client.client.models.x_com_update_body import XComUpdateBody
+
diff --git a/airflow_client/client/models/app_builder_menu_item_response.py b/airflow_client/client/models/app_builder_menu_item_response.py
index 131955d..ef9d786 100644
--- a/airflow_client/client/models/app_builder_menu_item_response.py
+++ b/airflow_client/client/models/app_builder_menu_item_response.py
@@ -29,6 +29,7 @@
     category: Optional[StrictStr] = None
     href: StrictStr
     name: StrictStr
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["category", "href", "name"]
 
     model_config = ConfigDict(
@@ -61,8 +62,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -70,6 +73,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -86,6 +94,11 @@
             "href": obj.get("href"),
             "name": obj.get("name")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/models/app_builder_view_response.py b/airflow_client/client/models/app_builder_view_response.py
index 814a5bd..b3722ef 100644
--- a/airflow_client/client/models/app_builder_view_response.py
+++ b/airflow_client/client/models/app_builder_view_response.py
@@ -30,6 +30,7 @@
     label: Optional[StrictStr] = None
     name: Optional[StrictStr] = None
     view: Optional[StrictStr] = None
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["category", "label", "name", "view"]
 
     model_config = ConfigDict(
@@ -62,8 +63,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -71,6 +74,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -88,6 +96,11 @@
             "name": obj.get("name"),
             "view": obj.get("view")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/models/backfill_post_body.py b/airflow_client/client/models/backfill_post_body.py
index 14f8942..788a69d 100644
--- a/airflow_client/client/models/backfill_post_body.py
+++ b/airflow_client/client/models/backfill_post_body.py
@@ -34,9 +34,10 @@
     max_active_runs: Optional[StrictInt] = 10
     reprocess_behavior: Optional[ReprocessBehavior] = None
     run_backwards: Optional[StrictBool] = False
+    run_on_latest_version: Optional[StrictBool] = True
     to_date: datetime
     additional_properties: Dict[str, Any] = {}
-    __properties: ClassVar[List[str]] = ["dag_id", "dag_run_conf", "from_date", "max_active_runs", "reprocess_behavior", "run_backwards", "to_date"]
+    __properties: ClassVar[List[str]] = ["dag_id", "dag_run_conf", "from_date", "max_active_runs", "reprocess_behavior", "run_backwards", "run_on_latest_version", "to_date"]
 
     model_config = ConfigDict(
         populate_by_name=True,
@@ -102,6 +103,7 @@
             "max_active_runs": obj.get("max_active_runs") if obj.get("max_active_runs") is not None else 10,
             "reprocess_behavior": obj.get("reprocess_behavior"),
             "run_backwards": obj.get("run_backwards") if obj.get("run_backwards") is not None else False,
+            "run_on_latest_version": obj.get("run_on_latest_version") if obj.get("run_on_latest_version") is not None else True,
             "to_date": obj.get("to_date")
         })
         # store additional fields in additional_properties
diff --git a/airflow_client/client/models/dag_version_response.py b/airflow_client/client/models/dag_version_response.py
index b9ed86d..c0bcc75 100644
--- a/airflow_client/client/models/dag_version_response.py
+++ b/airflow_client/client/models/dag_version_response.py
@@ -20,6 +20,7 @@
 from datetime import datetime
 from pydantic import BaseModel, ConfigDict, StrictInt, StrictStr
 from typing import Any, ClassVar, Dict, List, Optional
+from uuid import UUID
 from typing import Optional, Set
 from typing_extensions import Self
 
@@ -33,7 +34,7 @@
     created_at: datetime
     dag_display_name: StrictStr
     dag_id: StrictStr
-    id: StrictStr
+    id: UUID
     version_number: StrictInt
     __properties: ClassVar[List[str]] = ["bundle_name", "bundle_url", "bundle_version", "created_at", "dag_display_name", "dag_id", "id", "version_number"]
 
diff --git a/airflow_client/client/models/detail.py b/airflow_client/client/models/detail.py
index 9ac84cd..deb475f 100644
--- a/airflow_client/client/models/detail.py
+++ b/airflow_client/client/models/detail.py
@@ -23,7 +23,7 @@
 from typing_extensions import Literal, Self
 from pydantic import Field
 
-DETAIL_ANY_OF_SCHEMAS = ["object", "str"]
+DETAIL_ANY_OF_SCHEMAS = ["Dict[str, object]", "str"]
 
 class Detail(BaseModel):
     """
@@ -32,13 +32,13 @@
 
     # data type: str
     anyof_schema_1_validator: Optional[StrictStr] = None
-    # data type: object
+    # data type: Dict[str, object]
     anyof_schema_2_validator: Optional[Dict[str, Any]] = None
     if TYPE_CHECKING:
-        actual_instance: Optional[Union[object, str]] = None
+        actual_instance: Optional[Union[Dict[str, object], str]] = None
     else:
         actual_instance: Any = None
-    any_of_schemas: Set[str] = { "object", "str" }
+    any_of_schemas: Set[str] = { "Dict[str, object]", "str" }
 
     model_config = {
         "validate_assignment": True,
@@ -65,7 +65,7 @@
             return v
         except (ValidationError, ValueError) as e:
             error_messages.append(str(e))
-        # validate data type: object
+        # validate data type: Dict[str, object]
         try:
             instance.anyof_schema_2_validator = v
             return v
@@ -73,7 +73,7 @@
             error_messages.append(str(e))
         if error_messages:
             # no match
-            raise ValueError("No match found when setting the actual_instance in Detail with anyOf schemas: object, str. Details: " + ", ".join(error_messages))
+            raise ValueError("No match found when setting the actual_instance in Detail with anyOf schemas: Dict[str, object], str. Details: " + ", ".join(error_messages))
         else:
             return v
 
@@ -95,7 +95,7 @@
             return instance
         except (ValidationError, ValueError) as e:
             error_messages.append(str(e))
-        # deserialize data into object
+        # deserialize data into Dict[str, object]
         try:
             # validation
             instance.anyof_schema_2_validator = json.loads(json_str)
@@ -107,7 +107,7 @@
 
         if error_messages:
             # no match
-            raise ValueError("No match found when deserializing the JSON string into Detail with anyOf schemas: object, str. Details: " + ", ".join(error_messages))
+            raise ValueError("No match found when deserializing the JSON string into Detail with anyOf schemas: Dict[str, object], str. Details: " + ", ".join(error_messages))
         else:
             return instance
 
@@ -121,7 +121,7 @@
         else:
             return json.dumps(self.actual_instance)
 
-    def to_dict(self) -> Optional[Union[Dict[str, Any], object, str]]:
+    def to_dict(self) -> Optional[Union[Dict[str, Any], Dict[str, object], str]]:
         """Returns the dict representation of the actual instance"""
         if self.actual_instance is None:
             return None
diff --git a/airflow_client/client/models/external_view_response.py b/airflow_client/client/models/external_view_response.py
index 7ae8e81..6f559a9 100644
--- a/airflow_client/client/models/external_view_response.py
+++ b/airflow_client/client/models/external_view_response.py
@@ -33,6 +33,7 @@
     icon_dark_mode: Optional[StrictStr] = None
     name: StrictStr
     url_route: Optional[StrictStr] = None
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["category", "destination", "href", "icon", "icon_dark_mode", "name", "url_route"]
 
     @field_validator('destination')
@@ -75,8 +76,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -84,6 +87,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -104,6 +112,11 @@
             "name": obj.get("name"),
             "url_route": obj.get("url_route")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/models/fast_api_app_response.py b/airflow_client/client/models/fast_api_app_response.py
index 7de93fe..ee0a8c2 100644
--- a/airflow_client/client/models/fast_api_app_response.py
+++ b/airflow_client/client/models/fast_api_app_response.py
@@ -29,6 +29,7 @@
     app: StrictStr
     name: StrictStr
     url_prefix: StrictStr
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["app", "name", "url_prefix"]
 
     model_config = ConfigDict(
@@ -61,8 +62,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -70,6 +73,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -86,6 +94,11 @@
             "name": obj.get("name"),
             "url_prefix": obj.get("url_prefix")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/models/fast_api_root_middleware_response.py b/airflow_client/client/models/fast_api_root_middleware_response.py
index 7a771df..788f4d4 100644
--- a/airflow_client/client/models/fast_api_root_middleware_response.py
+++ b/airflow_client/client/models/fast_api_root_middleware_response.py
@@ -28,6 +28,7 @@
     """ # noqa: E501
     middleware: StrictStr
     name: StrictStr
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["middleware", "name"]
 
     model_config = ConfigDict(
@@ -60,8 +61,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -69,6 +72,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -84,6 +92,11 @@
             "middleware": obj.get("middleware"),
             "name": obj.get("name")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/models/provider_response.py b/airflow_client/client/models/provider_response.py
index 4e58fd0..b551fa2 100644
--- a/airflow_client/client/models/provider_response.py
+++ b/airflow_client/client/models/provider_response.py
@@ -18,7 +18,7 @@
 import json
 
 from pydantic import BaseModel, ConfigDict, StrictStr
-from typing import Any, ClassVar, Dict, List
+from typing import Any, ClassVar, Dict, List, Optional
 from typing import Optional, Set
 from typing_extensions import Self
 
@@ -27,9 +27,10 @@
     Provider serializer for responses.
     """ # noqa: E501
     description: StrictStr
+    documentation_url: Optional[StrictStr] = None
     package_name: StrictStr
     version: StrictStr
-    __properties: ClassVar[List[str]] = ["description", "package_name", "version"]
+    __properties: ClassVar[List[str]] = ["description", "documentation_url", "package_name", "version"]
 
     model_config = ConfigDict(
         populate_by_name=True,
@@ -83,6 +84,7 @@
 
         _obj = cls.model_validate({
             "description": obj.get("description"),
+            "documentation_url": obj.get("documentation_url"),
             "package_name": obj.get("package_name"),
             "version": obj.get("version")
         })
diff --git a/airflow_client/client/models/react_app_response.py b/airflow_client/client/models/react_app_response.py
index 50046ea..7fb82c5 100644
--- a/airflow_client/client/models/react_app_response.py
+++ b/airflow_client/client/models/react_app_response.py
@@ -33,6 +33,7 @@
     icon_dark_mode: Optional[StrictStr] = None
     name: StrictStr
     url_route: Optional[StrictStr] = None
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["bundle_url", "category", "destination", "icon", "icon_dark_mode", "name", "url_route"]
 
     @field_validator('destination')
@@ -75,8 +76,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -84,6 +87,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -104,6 +112,11 @@
             "name": obj.get("name"),
             "url_route": obj.get("url_route")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/models/structured_log_message.py b/airflow_client/client/models/structured_log_message.py
index 1f8e8b0..af5fc29 100644
--- a/airflow_client/client/models/structured_log_message.py
+++ b/airflow_client/client/models/structured_log_message.py
@@ -29,6 +29,7 @@
     """ # noqa: E501
     event: StrictStr
     timestamp: Optional[datetime] = None
+    additional_properties: Dict[str, Any] = {}
     __properties: ClassVar[List[str]] = ["event", "timestamp"]
 
     model_config = ConfigDict(
@@ -61,8 +62,10 @@
         * `None` is only added to the output dict for nullable fields that
           were set at model initialization. Other fields with value `None`
           are ignored.
+        * Fields in `self.additional_properties` are added to the output dict.
         """
         excluded_fields: Set[str] = set([
+            "additional_properties",
         ])
 
         _dict = self.model_dump(
@@ -70,6 +73,11 @@
             exclude=excluded_fields,
             exclude_none=True,
         )
+        # puts key-value pairs in additional_properties in the top level
+        if self.additional_properties is not None:
+            for _key, _value in self.additional_properties.items():
+                _dict[_key] = _value
+
         return _dict
 
     @classmethod
@@ -85,6 +93,11 @@
             "event": obj.get("event"),
             "timestamp": obj.get("timestamp")
         })
+        # store additional fields in additional_properties
+        for _key in obj.keys():
+            if _key not in cls.__properties:
+                _obj.additional_properties[_key] = obj.get(_key)
+
         return _obj
 
 
diff --git a/airflow_client/client/rest.py b/airflow_client/client/rest.py
index aa3c3ca..be0ad3f 100644
--- a/airflow_client/client/rest.py
+++ b/airflow_client/client/rest.py
@@ -48,12 +48,17 @@
             self.data = self.response.data
         return self.data
 
+    @property
+    def headers(self):
+        """Returns a dictionary of response headers."""
+        return self.response.headers
+
     def getheaders(self):
-        """Returns a dictionary of the response headers."""
+        """Returns a dictionary of the response headers; use ``headers`` instead."""
         return self.response.headers
 
     def getheader(self, name, default=None):
-        """Returns a given response header."""
+        """Returns a given response header; use ``headers.get()`` instead."""
         return self.response.headers.get(name, default)
 
 
diff --git a/docs/BackfillPostBody.md b/docs/BackfillPostBody.md
index d100fdf..17bd2a7 100644
--- a/docs/BackfillPostBody.md
+++ b/docs/BackfillPostBody.md
@@ -7,11 +7,12 @@
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
 **dag_id** | **str** |  | 
-**dag_run_conf** | **object** |  | [optional] 
+**dag_run_conf** | **Dict[str, object]** |  | [optional] 
 **from_date** | **datetime** |  | 
 **max_active_runs** | **int** |  | [optional] [default to 10]
 **reprocess_behavior** | [**ReprocessBehavior**](ReprocessBehavior.md) |  | [optional] 
 **run_backwards** | **bool** |  | [optional] [default to False]
+**run_on_latest_version** | **bool** |  | [optional] [default to True]
 **to_date** | **datetime** |  | 
 
 ## Example
diff --git a/docs/BackfillResponse.md b/docs/BackfillResponse.md
index bf89ab7..5e2cfa5 100644
--- a/docs/BackfillResponse.md
+++ b/docs/BackfillResponse.md
@@ -10,7 +10,7 @@
 **created_at** | **datetime** |  | 
 **dag_display_name** | **str** |  | 
 **dag_id** | **str** |  | 
-**dag_run_conf** | **object** |  | 
+**dag_run_conf** | **Dict[str, object]** |  | 
 **from_date** | **datetime** |  | 
 **id** | **int** |  | 
 **is_paused** | **bool** |  | 
diff --git a/docs/BulkActionResponse.md b/docs/BulkActionResponse.md
index 13e5b94..5f2488b 100644
--- a/docs/BulkActionResponse.md
+++ b/docs/BulkActionResponse.md
@@ -6,7 +6,7 @@
 
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
-**errors** | **List[object]** | A list of errors encountered during the operation, each containing details about the issue. | [optional] [default to []]
+**errors** | **List[Dict[str, object]]** | A list of errors encountered during the operation, each containing details about the issue. | [optional] [default to []]
 **success** | **List[str]** | A list of unique id/key representing successful operations. | [optional] [default to []]
 
 ## Example
diff --git a/docs/CreateAssetEventsBody.md b/docs/CreateAssetEventsBody.md
index d5eeb5f..63ce884 100644
--- a/docs/CreateAssetEventsBody.md
+++ b/docs/CreateAssetEventsBody.md
@@ -7,7 +7,7 @@
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
 **asset_id** | **int** |  | 
-**extra** | **object** |  | [optional] 
+**extra** | **Dict[str, object]** |  | [optional] 
 
 ## Example
 
diff --git a/docs/DAGApi.md b/docs/DAGApi.md
index b6d5955..c13e29e 100644
--- a/docs/DAGApi.md
+++ b/docs/DAGApi.md
@@ -459,7 +459,6 @@
 ```python
 import airflow_client.client
 from airflow_client.client.models.dag_collection_response import DAGCollectionResponse
-from airflow_client.client.models.dag_run_state import DagRunState
 from airflow_client.client.rest import ApiException
 from pprint import pprint
 
diff --git a/docs/DAGDetailsResponse.md b/docs/DAGDetailsResponse.md
index d04d263..d26ac43 100644
--- a/docs/DAGDetailsResponse.md
+++ b/docs/DAGDetailsResponse.md
@@ -6,7 +6,7 @@
 
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
-**asset_expression** | **object** |  | [optional] 
+**asset_expression** | **Dict[str, object]** |  | [optional] 
 **bundle_name** | **str** |  | [optional] 
 **bundle_version** | **str** |  | [optional] 
 **catchup** | **bool** |  | 
@@ -14,7 +14,7 @@
 **dag_display_name** | **str** |  | 
 **dag_id** | **str** |  | 
 **dag_run_timeout** | **str** |  | [optional] 
-**default_args** | **object** |  | [optional] 
+**default_args** | **Dict[str, object]** |  | [optional] 
 **description** | **str** |  | [optional] 
 **doc_md** | **str** |  | [optional] 
 **end_date** | **datetime** |  | [optional] 
@@ -40,7 +40,7 @@
 **next_dagrun_run_after** | **datetime** |  | [optional] 
 **owner_links** | **Dict[str, str]** |  | [optional] 
 **owners** | **List[str]** |  | 
-**params** | **object** |  | [optional] 
+**params** | **Dict[str, object]** |  | [optional] 
 **relative_fileloc** | **str** |  | [optional] 
 **render_template_as_native_obj** | **bool** |  | 
 **start_date** | **datetime** |  | [optional] 
diff --git a/docs/DAGRunResponse.md b/docs/DAGRunResponse.md
index 9df1d8e..ed820f3 100644
--- a/docs/DAGRunResponse.md
+++ b/docs/DAGRunResponse.md
@@ -7,7 +7,7 @@
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
 **bundle_version** | **str** |  | [optional] 
-**conf** | **object** |  | [optional] 
+**conf** | **Dict[str, object]** |  | [optional] 
 **dag_display_name** | **str** |  | 
 **dag_id** | **str** |  | 
 **dag_run_id** | **str** |  | 
diff --git a/docs/DagVersionResponse.md b/docs/DagVersionResponse.md
index a89bf6a..122061e 100644
--- a/docs/DagVersionResponse.md
+++ b/docs/DagVersionResponse.md
@@ -12,7 +12,7 @@
 **created_at** | **datetime** |  | 
 **dag_display_name** | **str** |  | 
 **dag_id** | **str** |  | 
-**id** | **str** |  | 
+**id** | **UUID** |  | 
 **version_number** | **int** |  | 
 
 ## Example
diff --git a/docs/DagWarningApi.md b/docs/DagWarningApi.md
index c33c217..e2ef2df 100644
--- a/docs/DagWarningApi.md
+++ b/docs/DagWarningApi.md
@@ -22,7 +22,6 @@
 ```python
 import airflow_client.client
 from airflow_client.client.models.dag_warning_collection_response import DAGWarningCollectionResponse
-from airflow_client.client.models.dag_warning_type import DagWarningType
 from airflow_client.client.rest import ApiException
 from pprint import pprint
 
diff --git a/docs/HITLDetail.md b/docs/HITLDetail.md
index 951d878..1a422ef 100644
--- a/docs/HITLDetail.md
+++ b/docs/HITLDetail.md
@@ -13,8 +13,8 @@
 **defaults** | **List[str]** |  | [optional] 
 **multiple** | **bool** |  | [optional] [default to False]
 **options** | **List[str]** |  | 
-**params** | **object** |  | [optional] 
-**params_input** | **object** |  | [optional] 
+**params** | **Dict[str, object]** |  | [optional] 
+**params_input** | **Dict[str, object]** |  | [optional] 
 **responded_at** | **datetime** |  | [optional] 
 **responded_by_user** | [**HITLUser**](HITLUser.md) |  | [optional] 
 **response_received** | **bool** |  | [optional] [default to False]
diff --git a/docs/HITLDetailResponse.md b/docs/HITLDetailResponse.md
index 2bde7a2..f6f0e94 100644
--- a/docs/HITLDetailResponse.md
+++ b/docs/HITLDetailResponse.md
@@ -7,7 +7,7 @@
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
 **chosen_options** | **List[str]** |  | 
-**params_input** | **object** |  | [optional] 
+**params_input** | **Dict[str, object]** |  | [optional] 
 **responded_at** | **datetime** |  | 
 **responded_by** | [**HITLUser**](HITLUser.md) |  | 
 
diff --git a/docs/ProviderResponse.md b/docs/ProviderResponse.md
index 39e6732..9d9c3e9 100644
--- a/docs/ProviderResponse.md
+++ b/docs/ProviderResponse.md
@@ -7,6 +7,7 @@
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
 **description** | **str** |  | 
+**documentation_url** | **str** |  | [optional] 
 **package_name** | **str** |  | 
 **version** | **str** |  | 
 
diff --git a/docs/ResponseClearDagRun.md b/docs/ResponseClearDagRun.md
index 231a042..7af527e 100644
--- a/docs/ResponseClearDagRun.md
+++ b/docs/ResponseClearDagRun.md
@@ -8,7 +8,7 @@
 **task_instances** | [**List[TaskInstanceResponse]**](TaskInstanceResponse.md) |  | 
 **total_entries** | **int** |  | 
 **bundle_version** | **str** |  | [optional] 
-**conf** | **object** |  | [optional] 
+**conf** | **Dict[str, object]** |  | [optional] 
 **dag_display_name** | **str** |  | 
 **dag_id** | **str** |  | 
 **dag_run_id** | **str** |  | 
diff --git a/docs/TaskInstanceApi.md b/docs/TaskInstanceApi.md
index ae3c8f8..89fe95c 100644
--- a/docs/TaskInstanceApi.md
+++ b/docs/TaskInstanceApi.md
@@ -1590,7 +1590,7 @@
 [[Back to top]](#) [[Back to API list]](../README.md#documentation-for-api-endpoints) [[Back to Model list]](../README.md#documentation-for-models) [[Back to README]](../README.md)
 
 # **get_task_instances**
-> TaskInstanceCollectionResponse get_task_instances(dag_id, dag_run_id, task_id=task_id, run_after_gte=run_after_gte, run_after_gt=run_after_gt, run_after_lte=run_after_lte, run_after_lt=run_after_lt, logical_date_gte=logical_date_gte, logical_date_gt=logical_date_gt, logical_date_lte=logical_date_lte, logical_date_lt=logical_date_lt, start_date_gte=start_date_gte, start_date_gt=start_date_gt, start_date_lte=start_date_lte, start_date_lt=start_date_lt, end_date_gte=end_date_gte, end_date_gt=end_date_gt, end_date_lte=end_date_lte, end_date_lt=end_date_lt, updated_at_gte=updated_at_gte, updated_at_gt=updated_at_gt, updated_at_lte=updated_at_lte, updated_at_lt=updated_at_lt, duration_gte=duration_gte, duration_gt=duration_gt, duration_lte=duration_lte, duration_lt=duration_lt, task_display_name_pattern=task_display_name_pattern, state=state, pool=pool, queue=queue, executor=executor, version_number=version_number, try_number=try_number, operator=operator, map_index=map_index, limit=limit, offset=offset, order_by=order_by)
+> TaskInstanceCollectionResponse get_task_instances(dag_id, dag_run_id, task_id=task_id, run_after_gte=run_after_gte, run_after_gt=run_after_gt, run_after_lte=run_after_lte, run_after_lt=run_after_lt, logical_date_gte=logical_date_gte, logical_date_gt=logical_date_gt, logical_date_lte=logical_date_lte, logical_date_lt=logical_date_lt, start_date_gte=start_date_gte, start_date_gt=start_date_gt, start_date_lte=start_date_lte, start_date_lt=start_date_lt, end_date_gte=end_date_gte, end_date_gt=end_date_gt, end_date_lte=end_date_lte, end_date_lt=end_date_lt, updated_at_gte=updated_at_gte, updated_at_gt=updated_at_gt, updated_at_lte=updated_at_lte, updated_at_lt=updated_at_lt, duration_gte=duration_gte, duration_gt=duration_gt, duration_lte=duration_lte, duration_lt=duration_lt, task_display_name_pattern=task_display_name_pattern, task_group_id=task_group_id, state=state, pool=pool, queue=queue, executor=executor, version_number=version_number, try_number=try_number, operator=operator, map_index=map_index, limit=limit, offset=offset, order_by=order_by)
 
 Get Task Instances
 
@@ -1660,6 +1660,7 @@
     duration_lte = 3.4 # float |  (optional)
     duration_lt = 3.4 # float |  (optional)
     task_display_name_pattern = 'task_display_name_pattern_example' # str | SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. (optional)
+    task_group_id = 'task_group_id_example' # str | Filter by exact task group ID. Returns all tasks within the specified task group. (optional)
     state = ['state_example'] # List[str] |  (optional)
     pool = ['pool_example'] # List[str] |  (optional)
     queue = ['queue_example'] # List[str] |  (optional)
@@ -1674,7 +1675,7 @@
 
     try:
         # Get Task Instances
-        api_response = api_instance.get_task_instances(dag_id, dag_run_id, task_id=task_id, run_after_gte=run_after_gte, run_after_gt=run_after_gt, run_after_lte=run_after_lte, run_after_lt=run_after_lt, logical_date_gte=logical_date_gte, logical_date_gt=logical_date_gt, logical_date_lte=logical_date_lte, logical_date_lt=logical_date_lt, start_date_gte=start_date_gte, start_date_gt=start_date_gt, start_date_lte=start_date_lte, start_date_lt=start_date_lt, end_date_gte=end_date_gte, end_date_gt=end_date_gt, end_date_lte=end_date_lte, end_date_lt=end_date_lt, updated_at_gte=updated_at_gte, updated_at_gt=updated_at_gt, updated_at_lte=updated_at_lte, updated_at_lt=updated_at_lt, duration_gte=duration_gte, duration_gt=duration_gt, duration_lte=duration_lte, duration_lt=duration_lt, task_display_name_pattern=task_display_name_pattern, state=state, pool=pool, queue=queue, executor=executor, version_number=version_number, try_number=try_number, operator=operator, map_index=map_index, limit=limit, offset=offset, order_by=order_by)
+        api_response = api_instance.get_task_instances(dag_id, dag_run_id, task_id=task_id, run_after_gte=run_after_gte, run_after_gt=run_after_gt, run_after_lte=run_after_lte, run_after_lt=run_after_lt, logical_date_gte=logical_date_gte, logical_date_gt=logical_date_gt, logical_date_lte=logical_date_lte, logical_date_lt=logical_date_lt, start_date_gte=start_date_gte, start_date_gt=start_date_gt, start_date_lte=start_date_lte, start_date_lt=start_date_lt, end_date_gte=end_date_gte, end_date_gt=end_date_gt, end_date_lte=end_date_lte, end_date_lt=end_date_lt, updated_at_gte=updated_at_gte, updated_at_gt=updated_at_gt, updated_at_lte=updated_at_lte, updated_at_lt=updated_at_lt, duration_gte=duration_gte, duration_gt=duration_gt, duration_lte=duration_lte, duration_lt=duration_lt, task_display_name_pattern=task_display_name_pattern, task_group_id=task_group_id, state=state, pool=pool, queue=queue, executor=executor, version_number=version_number, try_number=try_number, operator=operator, map_index=map_index, limit=limit, offset=offset, order_by=order_by)
         print("The response of TaskInstanceApi->get_task_instances:\n")
         pprint(api_response)
     except Exception as e:
@@ -1716,6 +1717,7 @@
  **duration_lte** | **float**|  | [optional] 
  **duration_lt** | **float**|  | [optional] 
  **task_display_name_pattern** | **str**| SQL LIKE expression — use `%` / `_` wildcards (e.g. `%customer_%`). Regular expressions are **not** supported. | [optional] 
+ **task_group_id** | **str**| Filter by exact task group ID. Returns all tasks within the specified task group. | [optional] 
  **state** | [**List[str]**](str.md)|  | [optional] 
  **pool** | [**List[str]**](str.md)|  | [optional] 
  **queue** | [**List[str]**](str.md)|  | [optional] 
diff --git a/docs/TaskInstanceResponse.md b/docs/TaskInstanceResponse.md
index 8fd5904..8d2c6d1 100644
--- a/docs/TaskInstanceResponse.md
+++ b/docs/TaskInstanceResponse.md
@@ -28,7 +28,7 @@
 **priority_weight** | **int** |  | [optional] 
 **queue** | **str** |  | [optional] 
 **queued_when** | **datetime** |  | [optional] 
-**rendered_fields** | **object** |  | [optional] 
+**rendered_fields** | **Dict[str, object]** |  | [optional] 
 **rendered_map_index** | **str** |  | [optional] 
 **run_after** | **datetime** |  | 
 **scheduled_when** | **datetime** |  | [optional] 
diff --git a/docs/TaskResponse.md b/docs/TaskResponse.md
index 99f49b9..6489d8d 100644
--- a/docs/TaskResponse.md
+++ b/docs/TaskResponse.md
@@ -6,7 +6,7 @@
 
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
-**class_ref** | **object** |  | [optional] 
+**class_ref** | **Dict[str, object]** |  | [optional] 
 **depends_on_past** | **bool** |  | 
 **doc_md** | **str** |  | [optional] 
 **downstream_task_ids** | **List[str]** |  | [optional] 
@@ -16,7 +16,7 @@
 **is_mapped** | **bool** |  | [optional] 
 **operator_name** | **str** |  | [optional] 
 **owner** | **str** |  | [optional] 
-**params** | **object** |  | [optional] 
+**params** | **Dict[str, object]** |  | [optional] 
 **pool** | **str** |  | [optional] 
 **pool_slots** | **float** |  | [optional] 
 **priority_weight** | **float** |  | [optional] 
diff --git a/docs/TriggerDAGRunPostBody.md b/docs/TriggerDAGRunPostBody.md
index 305a90d..a119bb6 100644
--- a/docs/TriggerDAGRunPostBody.md
+++ b/docs/TriggerDAGRunPostBody.md
@@ -6,7 +6,7 @@
 
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
-**conf** | **object** |  | [optional] 
+**conf** | **Dict[str, object]** |  | [optional] 
 **dag_run_id** | **str** |  | [optional] 
 **data_interval_end** | **datetime** |  | [optional] 
 **data_interval_start** | **datetime** |  | [optional] 
diff --git a/docs/UpdateHITLDetailPayload.md b/docs/UpdateHITLDetailPayload.md
index 4605c63..1746a71 100644
--- a/docs/UpdateHITLDetailPayload.md
+++ b/docs/UpdateHITLDetailPayload.md
@@ -7,7 +7,7 @@
 Name | Type | Description | Notes
 ------------ | ------------- | ------------- | -------------
 **chosen_options** | **List[str]** |  | 
-**params_input** | **object** |  | [optional] 
+**params_input** | **Dict[str, object]** |  | [optional] 
 
 ## Example
 
diff --git a/pyproject.toml b/pyproject.toml
index 59593b1..2cef853 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -51,7 +51,7 @@
 dependencies = [
     "pydantic >= 2.11.0",
     "python-dateutil",
-    "urllib3 >= 2.1.0",
+    "urllib3>=2.1.0,!=2.6.0",
 ]
 
 [project.urls]
diff --git a/spec/v2.yaml b/spec/v2.yaml
index 53702b1..66c6fae 100644
--- a/spec/v2.yaml
+++ b/spec/v2.yaml
@@ -261,6 +261,10 @@
           default: false
           title: Run Backwards
           type: boolean
+        run_on_latest_version:
+          default: true
+          title: Run On Latest Version
+          type: boolean
         to_date:
           format: date-time
           title: To Date
@@ -2639,6 +2643,9 @@
         description:
           title: Description
           type: string
+        documentation_url:
+          nullable: true
+          type: string
         package_name:
           title: Package Name
           type: string
@@ -7528,6 +7535,14 @@
         schema:
           nullable: true
           type: string
+      - description: Filter by exact task group ID. Returns all tasks within the specified
+          task group.
+        in: query
+        name: task_group_id
+        required: false
+        schema:
+          nullable: true
+          type: string
       - in: query
         name: state
         required: false
diff --git a/test/test_asset_collection_response.py b/test/test_asset_collection_response.py
index 0145173..fef5e94 100644
--- a/test/test_asset_collection_response.py
+++ b/test/test_asset_collection_response.py
@@ -56,9 +56,7 @@
                             }, 
                         group = '', 
                         id = 56, 
-                        last_asset_event = airflow_client.client.models.last_asset_event_response.LastAssetEventResponse(
-                            id = 0.0, 
-                            timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ), 
+                        last_asset_event = None, 
                         name = '', 
                         producing_tasks = [
                             airflow_client.client.models.task_outlet_asset_reference.TaskOutletAssetReference(
@@ -101,9 +99,7 @@
                             }, 
                         group = '', 
                         id = 56, 
-                        last_asset_event = airflow_client.client.models.last_asset_event_response.LastAssetEventResponse(
-                            id = 0.0, 
-                            timestamp = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), ), 
+                        last_asset_event = None, 
                         name = '', 
                         producing_tasks = [
                             airflow_client.client.models.task_outlet_asset_reference.TaskOutletAssetReference(
diff --git a/test/test_backfill_collection_response.py b/test/test_backfill_collection_response.py
index 5fac5b1..d125e39 100644
--- a/test/test_backfill_collection_response.py
+++ b/test/test_backfill_collection_response.py
@@ -41,7 +41,7 @@
                         created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         dag_display_name = '', 
                         dag_id = '', 
-                        dag_run_conf = airflow_client.client.models.dag_run_conf.Dag Run Conf(), 
+                        dag_run_conf = { }, 
                         from_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         id = 0.0, 
                         is_paused = True, 
@@ -60,7 +60,7 @@
                         created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         dag_display_name = '', 
                         dag_id = '', 
-                        dag_run_conf = airflow_client.client.models.dag_run_conf.Dag Run Conf(), 
+                        dag_run_conf = { }, 
                         from_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         id = 0.0, 
                         is_paused = True, 
diff --git a/test/test_backfill_post_body.py b/test/test_backfill_post_body.py
index cb3e380..3eb278b 100644
--- a/test/test_backfill_post_body.py
+++ b/test/test_backfill_post_body.py
@@ -36,11 +36,12 @@
         if include_optional:
             return BackfillPostBody(
                 dag_id = '',
-                dag_run_conf = airflow_client.client.models.dag_run_conf.Dag Run Conf(),
+                dag_run_conf = { },
                 from_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 max_active_runs = 56,
                 reprocess_behavior = 'failed',
                 run_backwards = True,
+                run_on_latest_version = True,
                 to_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f')
             )
         else:
diff --git a/test/test_backfill_response.py b/test/test_backfill_response.py
index 64b398a..670c8c3 100644
--- a/test/test_backfill_response.py
+++ b/test/test_backfill_response.py
@@ -39,7 +39,7 @@
                 created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 dag_display_name = '',
                 dag_id = '',
-                dag_run_conf = airflow_client.client.models.dag_run_conf.Dag Run Conf(),
+                dag_run_conf = { },
                 from_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 id = 0.0,
                 is_paused = True,
@@ -53,7 +53,7 @@
                 created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 dag_display_name = '',
                 dag_id = '',
-                dag_run_conf = airflow_client.client.models.dag_run_conf.Dag Run Conf(),
+                dag_run_conf = { },
                 from_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 id = 0.0,
                 is_paused = True,
diff --git a/test/test_bulk_action_response.py b/test/test_bulk_action_response.py
index 2e3625e..329a8c2 100644
--- a/test/test_bulk_action_response.py
+++ b/test/test_bulk_action_response.py
@@ -36,7 +36,7 @@
         if include_optional:
             return BulkActionResponse(
                 errors = [
-                    None
+                    { }
                     ],
                 success = [
                     ''
diff --git a/test/test_bulk_create_action_bulk_task_instance_body.py b/test/test_bulk_create_action_bulk_task_instance_body.py
index b6e4fcd..80b6bd7 100644
--- a/test/test_bulk_create_action_bulk_task_instance_body.py
+++ b/test/test_bulk_create_action_bulk_task_instance_body.py
@@ -44,7 +44,7 @@
                         include_past = True, 
                         include_upstream = True, 
                         map_index = 56, 
-                        new_state = 'removed', 
+                        new_state = None, 
                         note = '', 
                         task_id = '', )
                     ]
@@ -59,7 +59,7 @@
                         include_past = True, 
                         include_upstream = True, 
                         map_index = 56, 
-                        new_state = 'removed', 
+                        new_state = None, 
                         note = '', 
                         task_id = '', )
                     ],
diff --git a/test/test_bulk_response.py b/test/test_bulk_response.py
index c9ef6be..c6d8f00 100644
--- a/test/test_bulk_response.py
+++ b/test/test_bulk_response.py
@@ -37,21 +37,21 @@
             return BulkResponse(
                 create = airflow_client.client.models.bulk_action_response.BulkActionResponse(
                     errors = [
-                        None
+                        { }
                         ], 
                     success = [
                         ''
                         ], ),
                 delete = airflow_client.client.models.bulk_action_response.BulkActionResponse(
                     errors = [
-                        None
+                        { }
                         ], 
                     success = [
                         ''
                         ], ),
                 update = airflow_client.client.models.bulk_action_response.BulkActionResponse(
                     errors = [
-                        None
+                        { }
                         ], 
                     success = [
                         ''
diff --git a/test/test_bulk_update_action_bulk_task_instance_body.py b/test/test_bulk_update_action_bulk_task_instance_body.py
index 56ab7b3..3e6c666 100644
--- a/test/test_bulk_update_action_bulk_task_instance_body.py
+++ b/test/test_bulk_update_action_bulk_task_instance_body.py
@@ -44,7 +44,7 @@
                         include_past = True, 
                         include_upstream = True, 
                         map_index = 56, 
-                        new_state = 'removed', 
+                        new_state = None, 
                         note = '', 
                         task_id = '', )
                     ]
@@ -59,7 +59,7 @@
                         include_past = True, 
                         include_upstream = True, 
                         map_index = 56, 
-                        new_state = 'removed', 
+                        new_state = None, 
                         note = '', 
                         task_id = '', )
                     ],
diff --git a/test/test_create_asset_events_body.py b/test/test_create_asset_events_body.py
index 00beb38..345ef63 100644
--- a/test/test_create_asset_events_body.py
+++ b/test/test_create_asset_events_body.py
@@ -36,7 +36,7 @@
         if include_optional:
             return CreateAssetEventsBody(
                 asset_id = 56,
-                extra = airflow_client.client.models.extra.Extra()
+                extra = { }
             )
         else:
             return CreateAssetEventsBody(
diff --git a/test/test_dag_details_response.py b/test/test_dag_details_response.py
index 8473cc7..5454bcb 100644
--- a/test/test_dag_details_response.py
+++ b/test/test_dag_details_response.py
@@ -35,7 +35,7 @@
         model = DAGDetailsResponse()
         if include_optional:
             return DAGDetailsResponse(
-                asset_expression = None,
+                asset_expression = { },
                 bundle_name = '',
                 bundle_version = '',
                 catchup = True,
@@ -43,7 +43,7 @@
                 dag_display_name = '',
                 dag_id = '',
                 dag_run_timeout = '',
-                default_args = None,
+                default_args = { },
                 description = '',
                 doc_md = '',
                 end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
@@ -81,7 +81,7 @@
                 owners = [
                     ''
                     ],
-                params = None,
+                params = { },
                 relative_fileloc = '',
                 render_template_as_native_obj = True,
                 start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
diff --git a/test/test_dag_run_collection_response.py b/test/test_dag_run_collection_response.py
index 26f8fa1..e151658 100644
--- a/test/test_dag_run_collection_response.py
+++ b/test/test_dag_run_collection_response.py
@@ -38,7 +38,7 @@
                 dag_runs = [
                     airflow_client.client.models.dag_run_response.DAGRunResponse(
                         bundle_version = '', 
-                        conf = airflow_client.client.models.conf.conf(), 
+                        conf = { }, 
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
@@ -65,7 +65,7 @@
                         run_type = 'backfill', 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         state = 'queued', 
-                        triggered_by = 'cli', 
+                        triggered_by = None, 
                         triggering_user_name = '', )
                     ],
                 total_entries = 56
@@ -75,7 +75,7 @@
                 dag_runs = [
                     airflow_client.client.models.dag_run_response.DAGRunResponse(
                         bundle_version = '', 
-                        conf = airflow_client.client.models.conf.conf(), 
+                        conf = { }, 
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
@@ -102,7 +102,7 @@
                         run_type = 'backfill', 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         state = 'queued', 
-                        triggered_by = 'cli', 
+                        triggered_by = None, 
                         triggering_user_name = '', )
                     ],
                 total_entries = 56,
diff --git a/test/test_dag_run_response.py b/test/test_dag_run_response.py
index ce41ee3..de722e7 100644
--- a/test/test_dag_run_response.py
+++ b/test/test_dag_run_response.py
@@ -36,7 +36,7 @@
         if include_optional:
             return DAGRunResponse(
                 bundle_version = '',
-                conf = airflow_client.client.models.conf.conf(),
+                conf = { },
                 dag_display_name = '',
                 dag_id = '',
                 dag_run_id = '',
diff --git a/test/test_dag_runs_batch_body.py b/test/test_dag_runs_batch_body.py
index 62555a2..101314e 100644
--- a/test/test_dag_runs_batch_body.py
+++ b/test/test_dag_runs_batch_body.py
@@ -58,7 +58,7 @@
                 start_date_lt = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 start_date_lte = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 states = [
-                    'queued'
+                    None
                     ]
             )
         else:
diff --git a/test/test_hitl_detail.py b/test/test_hitl_detail.py
index 498270f..d9ee95e 100644
--- a/test/test_hitl_detail.py
+++ b/test/test_hitl_detail.py
@@ -52,8 +52,8 @@
                 options = [
                     ''
                     ],
-                params = airflow_client.client.models.params.Params(),
-                params_input = airflow_client.client.models.params_input.Params Input(),
+                params = { },
+                params_input = { },
                 responded_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 responded_by_user = airflow_client.client.models.hitl_user.HITLUser(
                     id = '', 
@@ -64,15 +64,7 @@
                     dag_display_name = '', 
                     dag_id = '', 
                     dag_run_id = '', 
-                    dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                        bundle_name = '', 
-                        bundle_url = '', 
-                        bundle_version = '', 
-                        created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        dag_display_name = '', 
-                        dag_id = '', 
-                        id = '', 
-                        version_number = 56, ), 
+                    dag_version = None, 
                     duration = 1.337, 
                     end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                     executor = '', 
@@ -91,31 +83,16 @@
                     priority_weight = 56, 
                     queue = '', 
                     queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                    rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                    rendered_fields = { }, 
                     rendered_map_index = '', 
                     run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                     scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                     start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                    state = 'removed', 
+                    state = None, 
                     task_display_name = '', 
                     task_id = '', 
-                    trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                        classpath = '', 
-                        created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        id = 56, 
-                        kwargs = '', 
-                        triggerer_id = 56, ), 
-                    triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                        dag_display_name = '', 
-                        dag_id = '', 
-                        end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        executor_class = '', 
-                        hostname = '', 
-                        id = 56, 
-                        job_type = '', 
-                        latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        unixname = '', ), 
+                    trigger = None, 
+                    triggerer_job = None, 
                     try_number = 56, 
                     unixname = '', )
             )
@@ -130,15 +107,7 @@
                     dag_display_name = '', 
                     dag_id = '', 
                     dag_run_id = '', 
-                    dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                        bundle_name = '', 
-                        bundle_url = '', 
-                        bundle_version = '', 
-                        created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        dag_display_name = '', 
-                        dag_id = '', 
-                        id = '', 
-                        version_number = 56, ), 
+                    dag_version = None, 
                     duration = 1.337, 
                     end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                     executor = '', 
@@ -157,31 +126,16 @@
                     priority_weight = 56, 
                     queue = '', 
                     queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                    rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                    rendered_fields = { }, 
                     rendered_map_index = '', 
                     run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                     scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                     start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                    state = 'removed', 
+                    state = None, 
                     task_display_name = '', 
                     task_id = '', 
-                    trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                        classpath = '', 
-                        created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        id = 56, 
-                        kwargs = '', 
-                        triggerer_id = 56, ), 
-                    triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                        dag_display_name = '', 
-                        dag_id = '', 
-                        end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        executor_class = '', 
-                        hostname = '', 
-                        id = 56, 
-                        job_type = '', 
-                        latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        unixname = '', ), 
+                    trigger = None, 
+                    triggerer_job = None, 
                     try_number = 56, 
                     unixname = '', ),
         )
diff --git a/test/test_hitl_detail_collection.py b/test/test_hitl_detail_collection.py
index 8873d51..8b330f9 100644
--- a/test/test_hitl_detail_collection.py
+++ b/test/test_hitl_detail_collection.py
@@ -54,27 +54,17 @@
                         options = [
                             ''
                             ], 
-                        params = airflow_client.client.models.params.Params(), 
-                        params_input = airflow_client.client.models.params_input.Params Input(), 
+                        params = { }, 
+                        params_input = { }, 
                         responded_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        responded_by_user = airflow_client.client.models.hitl_user.HITLUser(
-                            id = '', 
-                            name = '', ), 
+                        responded_by_user = None, 
                         response_received = True, 
                         subject = '', 
                         task_instance = airflow_client.client.models.task_instance_response.TaskInstanceResponse(
                             dag_display_name = '', 
                             dag_id = '', 
                             dag_run_id = '', 
-                            dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                                bundle_name = '', 
-                                bundle_url = '', 
-                                bundle_version = '', 
-                                created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                dag_display_name = '', 
-                                dag_id = '', 
-                                id = '', 
-                                version_number = 56, ), 
+                            dag_version = None, 
                             duration = 1.337, 
                             end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                             executor = '', 
@@ -93,31 +83,16 @@
                             priority_weight = 56, 
                             queue = '', 
                             queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                            rendered_fields = { }, 
                             rendered_map_index = '', 
                             run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                             scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                             start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            state = 'removed', 
+                            state = None, 
                             task_display_name = '', 
                             task_id = '', 
-                            trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                                classpath = '', 
-                                created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                id = 56, 
-                                kwargs = '', 
-                                triggerer_id = 56, ), 
-                            triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                                dag_display_name = '', 
-                                dag_id = '', 
-                                end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                executor_class = '', 
-                                hostname = '', 
-                                id = 56, 
-                                job_type = '', 
-                                latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                unixname = '', ), 
+                            trigger = None, 
+                            triggerer_job = None, 
                             try_number = 56, 
                             unixname = '', ), )
                     ],
@@ -144,27 +119,17 @@
                         options = [
                             ''
                             ], 
-                        params = airflow_client.client.models.params.Params(), 
-                        params_input = airflow_client.client.models.params_input.Params Input(), 
+                        params = { }, 
+                        params_input = { }, 
                         responded_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        responded_by_user = airflow_client.client.models.hitl_user.HITLUser(
-                            id = '', 
-                            name = '', ), 
+                        responded_by_user = None, 
                         response_received = True, 
                         subject = '', 
                         task_instance = airflow_client.client.models.task_instance_response.TaskInstanceResponse(
                             dag_display_name = '', 
                             dag_id = '', 
                             dag_run_id = '', 
-                            dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                                bundle_name = '', 
-                                bundle_url = '', 
-                                bundle_version = '', 
-                                created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                dag_display_name = '', 
-                                dag_id = '', 
-                                id = '', 
-                                version_number = 56, ), 
+                            dag_version = None, 
                             duration = 1.337, 
                             end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                             executor = '', 
@@ -183,31 +148,16 @@
                             priority_weight = 56, 
                             queue = '', 
                             queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                            rendered_fields = { }, 
                             rendered_map_index = '', 
                             run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                             scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                             start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            state = 'removed', 
+                            state = None, 
                             task_display_name = '', 
                             task_id = '', 
-                            trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                                classpath = '', 
-                                created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                id = 56, 
-                                kwargs = '', 
-                                triggerer_id = 56, ), 
-                            triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                                dag_display_name = '', 
-                                dag_id = '', 
-                                end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                executor_class = '', 
-                                hostname = '', 
-                                id = 56, 
-                                job_type = '', 
-                                latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                                unixname = '', ), 
+                            trigger = None, 
+                            triggerer_job = None, 
                             try_number = 56, 
                             unixname = '', ), )
                     ],
diff --git a/test/test_hitl_detail_response.py b/test/test_hitl_detail_response.py
index b2ded33..fb19ed9 100644
--- a/test/test_hitl_detail_response.py
+++ b/test/test_hitl_detail_response.py
@@ -38,7 +38,7 @@
                 chosen_options = [
                     ''
                     ],
-                params_input = airflow_client.client.models.params_input.Params Input(),
+                params_input = { },
                 responded_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 responded_by = airflow_client.client.models.hitl_user.HITLUser(
                     id = '', 
diff --git a/test/test_plugin_collection_response.py b/test/test_plugin_collection_response.py
index 56e67cc..8a400ef 100644
--- a/test/test_plugin_collection_response.py
+++ b/test/test_plugin_collection_response.py
@@ -38,38 +38,19 @@
                 plugins = [
                     airflow_client.client.models.plugin_response.PluginResponse(
                         appbuilder_menu_items = [
-                            airflow_client.client.models.app_builder_menu_item_response.AppBuilderMenuItemResponse(
-                                category = '', 
-                                href = '', 
-                                name = '', )
+                            { }
                             ], 
                         appbuilder_views = [
-                            airflow_client.client.models.app_builder_view_response.AppBuilderViewResponse(
-                                category = '', 
-                                label = '', 
-                                name = '', 
-                                view = '', )
+                            { }
                             ], 
                         external_views = [
-                            airflow_client.client.models.external_view_response.ExternalViewResponse(
-                                category = '', 
-                                destination = 'nav', 
-                                href = '', 
-                                icon = '', 
-                                icon_dark_mode = '', 
-                                name = '', 
-                                url_route = '', )
+                            { }
                             ], 
                         fastapi_apps = [
-                            airflow_client.client.models.fast_api_app_response.FastAPIAppResponse(
-                                app = '', 
-                                name = '', 
-                                url_prefix = '', )
+                            { }
                             ], 
                         fastapi_root_middlewares = [
-                            airflow_client.client.models.fast_api_root_middleware_response.FastAPIRootMiddlewareResponse(
-                                middleware = '', 
-                                name = '', )
+                            { }
                             ], 
                         flask_blueprints = [
                             ''
@@ -88,14 +69,7 @@
                             ''
                             ], 
                         react_apps = [
-                            airflow_client.client.models.react_app_response.ReactAppResponse(
-                                bundle_url = '', 
-                                category = '', 
-                                destination = 'nav', 
-                                icon = '', 
-                                icon_dark_mode = '', 
-                                name = '', 
-                                url_route = '', )
+                            { }
                             ], 
                         source = '', 
                         timetables = [
@@ -109,38 +83,19 @@
                 plugins = [
                     airflow_client.client.models.plugin_response.PluginResponse(
                         appbuilder_menu_items = [
-                            airflow_client.client.models.app_builder_menu_item_response.AppBuilderMenuItemResponse(
-                                category = '', 
-                                href = '', 
-                                name = '', )
+                            { }
                             ], 
                         appbuilder_views = [
-                            airflow_client.client.models.app_builder_view_response.AppBuilderViewResponse(
-                                category = '', 
-                                label = '', 
-                                name = '', 
-                                view = '', )
+                            { }
                             ], 
                         external_views = [
-                            airflow_client.client.models.external_view_response.ExternalViewResponse(
-                                category = '', 
-                                destination = 'nav', 
-                                href = '', 
-                                icon = '', 
-                                icon_dark_mode = '', 
-                                name = '', 
-                                url_route = '', )
+                            { }
                             ], 
                         fastapi_apps = [
-                            airflow_client.client.models.fast_api_app_response.FastAPIAppResponse(
-                                app = '', 
-                                name = '', 
-                                url_prefix = '', )
+                            { }
                             ], 
                         fastapi_root_middlewares = [
-                            airflow_client.client.models.fast_api_root_middleware_response.FastAPIRootMiddlewareResponse(
-                                middleware = '', 
-                                name = '', )
+                            { }
                             ], 
                         flask_blueprints = [
                             ''
@@ -159,14 +114,7 @@
                             ''
                             ], 
                         react_apps = [
-                            airflow_client.client.models.react_app_response.ReactAppResponse(
-                                bundle_url = '', 
-                                category = '', 
-                                destination = 'nav', 
-                                icon = '', 
-                                icon_dark_mode = '', 
-                                name = '', 
-                                url_route = '', )
+                            { }
                             ], 
                         source = '', 
                         timetables = [
diff --git a/test/test_plugin_response.py b/test/test_plugin_response.py
index 2a69944..151f766 100644
--- a/test/test_plugin_response.py
+++ b/test/test_plugin_response.py
@@ -36,38 +36,19 @@
         if include_optional:
             return PluginResponse(
                 appbuilder_menu_items = [
-                    airflow_client.client.models.app_builder_menu_item_response.AppBuilderMenuItemResponse(
-                        category = '', 
-                        href = '', 
-                        name = '', )
+                    { }
                     ],
                 appbuilder_views = [
-                    airflow_client.client.models.app_builder_view_response.AppBuilderViewResponse(
-                        category = '', 
-                        label = '', 
-                        name = '', 
-                        view = '', )
+                    { }
                     ],
                 external_views = [
-                    airflow_client.client.models.external_view_response.ExternalViewResponse(
-                        category = '', 
-                        destination = 'nav', 
-                        href = '', 
-                        icon = '', 
-                        icon_dark_mode = '', 
-                        name = '', 
-                        url_route = '', )
+                    { }
                     ],
                 fastapi_apps = [
-                    airflow_client.client.models.fast_api_app_response.FastAPIAppResponse(
-                        app = '', 
-                        name = '', 
-                        url_prefix = '', )
+                    { }
                     ],
                 fastapi_root_middlewares = [
-                    airflow_client.client.models.fast_api_root_middleware_response.FastAPIRootMiddlewareResponse(
-                        middleware = '', 
-                        name = '', )
+                    { }
                     ],
                 flask_blueprints = [
                     ''
@@ -86,14 +67,7 @@
                     ''
                     ],
                 react_apps = [
-                    airflow_client.client.models.react_app_response.ReactAppResponse(
-                        bundle_url = '', 
-                        category = '', 
-                        destination = 'nav', 
-                        icon = '', 
-                        icon_dark_mode = '', 
-                        name = '', 
-                        url_route = '', )
+                    { }
                     ],
                 source = '',
                 timetables = [
@@ -103,38 +77,19 @@
         else:
             return PluginResponse(
                 appbuilder_menu_items = [
-                    airflow_client.client.models.app_builder_menu_item_response.AppBuilderMenuItemResponse(
-                        category = '', 
-                        href = '', 
-                        name = '', )
+                    { }
                     ],
                 appbuilder_views = [
-                    airflow_client.client.models.app_builder_view_response.AppBuilderViewResponse(
-                        category = '', 
-                        label = '', 
-                        name = '', 
-                        view = '', )
+                    { }
                     ],
                 external_views = [
-                    airflow_client.client.models.external_view_response.ExternalViewResponse(
-                        category = '', 
-                        destination = 'nav', 
-                        href = '', 
-                        icon = '', 
-                        icon_dark_mode = '', 
-                        name = '', 
-                        url_route = '', )
+                    { }
                     ],
                 fastapi_apps = [
-                    airflow_client.client.models.fast_api_app_response.FastAPIAppResponse(
-                        app = '', 
-                        name = '', 
-                        url_prefix = '', )
+                    { }
                     ],
                 fastapi_root_middlewares = [
-                    airflow_client.client.models.fast_api_root_middleware_response.FastAPIRootMiddlewareResponse(
-                        middleware = '', 
-                        name = '', )
+                    { }
                     ],
                 flask_blueprints = [
                     ''
@@ -153,14 +108,7 @@
                     ''
                     ],
                 react_apps = [
-                    airflow_client.client.models.react_app_response.ReactAppResponse(
-                        bundle_url = '', 
-                        category = '', 
-                        destination = 'nav', 
-                        icon = '', 
-                        icon_dark_mode = '', 
-                        name = '', 
-                        url_route = '', )
+                    { }
                     ],
                 source = '',
                 timetables = [
diff --git a/test/test_provider_collection_response.py b/test/test_provider_collection_response.py
index 27297c2..114fc0d 100644
--- a/test/test_provider_collection_response.py
+++ b/test/test_provider_collection_response.py
@@ -38,6 +38,7 @@
                 providers = [
                     airflow_client.client.models.provider_response.ProviderResponse(
                         description = '', 
+                        documentation_url = '', 
                         package_name = '', 
                         version = '', )
                     ],
@@ -48,6 +49,7 @@
                 providers = [
                     airflow_client.client.models.provider_response.ProviderResponse(
                         description = '', 
+                        documentation_url = '', 
                         package_name = '', 
                         version = '', )
                     ],
diff --git a/test/test_provider_response.py b/test/test_provider_response.py
index 71b89e7..606f7a8 100644
--- a/test/test_provider_response.py
+++ b/test/test_provider_response.py
@@ -36,6 +36,7 @@
         if include_optional:
             return ProviderResponse(
                 description = '',
+                documentation_url = '',
                 package_name = '',
                 version = ''
             )
diff --git a/test/test_response_clear_dag_run.py b/test/test_response_clear_dag_run.py
index a036ab8..1add495 100644
--- a/test/test_response_clear_dag_run.py
+++ b/test/test_response_clear_dag_run.py
@@ -40,15 +40,7 @@
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
-                        dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                            bundle_name = '', 
-                            bundle_url = '', 
-                            bundle_version = '', 
-                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            id = '', 
-                            version_number = 56, ), 
+                        dag_version = null, 
                         duration = 1.337, 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         executor = '', 
@@ -67,37 +59,22 @@
                         priority_weight = 56, 
                         queue = '', 
                         queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                        rendered_fields = { }, 
                         rendered_map_index = '', 
                         run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        state = 'removed', 
+                        state = null, 
                         task_display_name = '', 
                         task_id = '', 
-                        trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                            classpath = '', 
-                            created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            id = 56, 
-                            kwargs = '', 
-                            triggerer_id = 56, ), 
-                        triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            executor_class = '', 
-                            hostname = '', 
-                            id = 56, 
-                            job_type = '', 
-                            latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            unixname = '', ), 
+                        trigger = null, 
+                        triggerer_job = null, 
                         try_number = 56, 
                         unixname = '', )
                     ],
                 total_entries = 56,
                 bundle_version = '',
-                conf = airflow_client.client.models.conf.conf(),
+                conf = { },
                 dag_display_name = '',
                 dag_id = '',
                 dag_run_id = '',
@@ -134,15 +111,7 @@
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
-                        dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                            bundle_name = '', 
-                            bundle_url = '', 
-                            bundle_version = '', 
-                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            id = '', 
-                            version_number = 56, ), 
+                        dag_version = null, 
                         duration = 1.337, 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         executor = '', 
@@ -161,31 +130,16 @@
                         priority_weight = 56, 
                         queue = '', 
                         queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                        rendered_fields = { }, 
                         rendered_map_index = '', 
                         run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        state = 'removed', 
+                        state = null, 
                         task_display_name = '', 
                         task_id = '', 
-                        trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                            classpath = '', 
-                            created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            id = 56, 
-                            kwargs = '', 
-                            triggerer_id = 56, ), 
-                        triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            executor_class = '', 
-                            hostname = '', 
-                            id = 56, 
-                            job_type = '', 
-                            latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            unixname = '', ), 
+                        trigger = null, 
+                        triggerer_job = null, 
                         try_number = 56, 
                         unixname = '', )
                     ],
diff --git a/test/test_task_collection_response.py b/test/test_task_collection_response.py
index 1a1653a..1d9bb4a 100644
--- a/test/test_task_collection_response.py
+++ b/test/test_task_collection_response.py
@@ -37,35 +37,27 @@
             return TaskCollectionResponse(
                 tasks = [
                     airflow_client.client.models.task_response.TaskResponse(
-                        class_ref = airflow_client.client.models.conf.conf(), 
+                        class_ref = { }, 
                         depends_on_past = True, 
                         doc_md = '', 
                         downstream_task_ids = [
                             ''
                             ], 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        execution_timeout = airflow_client.client.models.time_delta.TimeDelta(
-                            __type = 'TimeDelta', 
-                            days = 56, 
-                            microseconds = 56, 
-                            seconds = 56, ), 
+                        execution_timeout = null, 
                         extra_links = [
                             ''
                             ], 
                         is_mapped = True, 
                         operator_name = '', 
                         owner = '', 
-                        params = airflow_client.client.models.conf.conf(), 
+                        params = { }, 
                         pool = '', 
                         pool_slots = 1.337, 
                         priority_weight = 1.337, 
                         queue = '', 
                         retries = 1.337, 
-                        retry_delay = airflow_client.client.models.time_delta.TimeDelta(
-                            __type = 'TimeDelta', 
-                            days = 56, 
-                            microseconds = 56, 
-                            seconds = 56, ), 
+                        retry_delay = null, 
                         retry_exponential_backoff = True, 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         task_display_name = '', 
@@ -85,35 +77,27 @@
             return TaskCollectionResponse(
                 tasks = [
                     airflow_client.client.models.task_response.TaskResponse(
-                        class_ref = airflow_client.client.models.conf.conf(), 
+                        class_ref = { }, 
                         depends_on_past = True, 
                         doc_md = '', 
                         downstream_task_ids = [
                             ''
                             ], 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        execution_timeout = airflow_client.client.models.time_delta.TimeDelta(
-                            __type = 'TimeDelta', 
-                            days = 56, 
-                            microseconds = 56, 
-                            seconds = 56, ), 
+                        execution_timeout = null, 
                         extra_links = [
                             ''
                             ], 
                         is_mapped = True, 
                         operator_name = '', 
                         owner = '', 
-                        params = airflow_client.client.models.conf.conf(), 
+                        params = { }, 
                         pool = '', 
                         pool_slots = 1.337, 
                         priority_weight = 1.337, 
                         queue = '', 
                         retries = 1.337, 
-                        retry_delay = airflow_client.client.models.time_delta.TimeDelta(
-                            __type = 'TimeDelta', 
-                            days = 56, 
-                            microseconds = 56, 
-                            seconds = 56, ), 
+                        retry_delay = null, 
                         retry_exponential_backoff = True, 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         task_display_name = '', 
diff --git a/test/test_task_instance_collection_response.py b/test/test_task_instance_collection_response.py
index 572a4ac..0bb10b8 100644
--- a/test/test_task_instance_collection_response.py
+++ b/test/test_task_instance_collection_response.py
@@ -40,15 +40,7 @@
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
-                        dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                            bundle_name = '', 
-                            bundle_url = '', 
-                            bundle_version = '', 
-                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            id = '', 
-                            version_number = 56, ), 
+                        dag_version = null, 
                         duration = 1.337, 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         executor = '', 
@@ -67,31 +59,16 @@
                         priority_weight = 56, 
                         queue = '', 
                         queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                        rendered_fields = { }, 
                         rendered_map_index = '', 
                         run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        state = 'removed', 
+                        state = null, 
                         task_display_name = '', 
                         task_id = '', 
-                        trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                            classpath = '', 
-                            created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            id = 56, 
-                            kwargs = '', 
-                            triggerer_id = 56, ), 
-                        triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            executor_class = '', 
-                            hostname = '', 
-                            id = 56, 
-                            job_type = '', 
-                            latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            unixname = '', ), 
+                        trigger = null, 
+                        triggerer_job = null, 
                         try_number = 56, 
                         unixname = '', )
                     ],
@@ -104,15 +81,7 @@
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
-                        dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                            bundle_name = '', 
-                            bundle_url = '', 
-                            bundle_version = '', 
-                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            id = '', 
-                            version_number = 56, ), 
+                        dag_version = null, 
                         duration = 1.337, 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         executor = '', 
@@ -131,31 +100,16 @@
                         priority_weight = 56, 
                         queue = '', 
                         queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(), 
+                        rendered_fields = { }, 
                         rendered_map_index = '', 
                         run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        state = 'removed', 
+                        state = null, 
                         task_display_name = '', 
                         task_id = '', 
-                        trigger = airflow_client.client.models.trigger_response.TriggerResponse(
-                            classpath = '', 
-                            created_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            id = 56, 
-                            kwargs = '', 
-                            triggerer_id = 56, ), 
-                        triggerer_job = airflow_client.client.models.job_response.JobResponse(
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            executor_class = '', 
-                            hostname = '', 
-                            id = 56, 
-                            job_type = '', 
-                            latest_heartbeat = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            unixname = '', ), 
+                        trigger = null, 
+                        triggerer_job = null, 
                         try_number = 56, 
                         unixname = '', )
                     ],
diff --git a/test/test_task_instance_history_collection_response.py b/test/test_task_instance_history_collection_response.py
index f67fcfa..d1719b1 100644
--- a/test/test_task_instance_history_collection_response.py
+++ b/test/test_task_instance_history_collection_response.py
@@ -40,15 +40,7 @@
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
-                        dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                            bundle_name = '', 
-                            bundle_url = '', 
-                            bundle_version = '', 
-                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            id = '', 
-                            version_number = 56, ), 
+                        dag_version = null, 
                         duration = 1.337, 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         executor = '', 
@@ -66,7 +58,7 @@
                         queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        state = 'removed', 
+                        state = null, 
                         task_display_name = '', 
                         task_id = '', 
                         try_number = 56, 
@@ -81,15 +73,7 @@
                         dag_display_name = '', 
                         dag_id = '', 
                         dag_run_id = '', 
-                        dag_version = airflow_client.client.models.dag_version_response.DagVersionResponse(
-                            bundle_name = '', 
-                            bundle_url = '', 
-                            bundle_version = '', 
-                            created_at = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                            dag_display_name = '', 
-                            dag_id = '', 
-                            id = '', 
-                            version_number = 56, ), 
+                        dag_version = null, 
                         duration = 1.337, 
                         end_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         executor = '', 
@@ -107,7 +91,7 @@
                         queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
                         start_date = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'), 
-                        state = 'removed', 
+                        state = null, 
                         task_display_name = '', 
                         task_id = '', 
                         try_number = 56, 
diff --git a/test/test_task_instance_response.py b/test/test_task_instance_response.py
index f69d77f..c27b8ee 100644
--- a/test/test_task_instance_response.py
+++ b/test/test_task_instance_response.py
@@ -65,7 +65,7 @@
                 priority_weight = 56,
                 queue = '',
                 queued_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
-                rendered_fields = airflow_client.client.models.rendered_fields.Rendered Fields(),
+                rendered_fields = { },
                 rendered_map_index = '',
                 run_after = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 scheduled_when = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
diff --git a/test/test_task_instances_batch_body.py b/test/test_task_instances_batch_body.py
index 754a18b..6e2a8cf 100644
--- a/test/test_task_instances_batch_body.py
+++ b/test/test_task_instances_batch_body.py
@@ -74,7 +74,7 @@
                 start_date_lt = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 start_date_lte = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 state = [
-                    'removed'
+                    null
                     ],
                 task_ids = [
                     ''
diff --git a/test/test_task_response.py b/test/test_task_response.py
index 1997500..b38fb93 100644
--- a/test/test_task_response.py
+++ b/test/test_task_response.py
@@ -35,7 +35,7 @@
         model = TaskResponse()
         if include_optional:
             return TaskResponse(
-                class_ref = airflow_client.client.models.conf.conf(),
+                class_ref = { },
                 depends_on_past = True,
                 doc_md = '',
                 downstream_task_ids = [
@@ -53,7 +53,7 @@
                 is_mapped = True,
                 operator_name = '',
                 owner = '',
-                params = airflow_client.client.models.conf.conf(),
+                params = { },
                 pool = '',
                 pool_slots = 1.337,
                 priority_weight = 1.337,
diff --git a/test/test_trigger_dag_run_post_body.py b/test/test_trigger_dag_run_post_body.py
index 81c1f27..af5ba45 100644
--- a/test/test_trigger_dag_run_post_body.py
+++ b/test/test_trigger_dag_run_post_body.py
@@ -35,7 +35,7 @@
         model = TriggerDAGRunPostBody()
         if include_optional:
             return TriggerDAGRunPostBody(
-                conf = airflow_client.client.models.conf.conf(),
+                conf = { },
                 dag_run_id = '',
                 data_interval_end = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
                 data_interval_start = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
diff --git a/test/test_update_hitl_detail_payload.py b/test/test_update_hitl_detail_payload.py
index 4314f4b..9b0a045 100644
--- a/test/test_update_hitl_detail_payload.py
+++ b/test/test_update_hitl_detail_payload.py
@@ -38,7 +38,7 @@
                 chosen_options = [
                     ''
                     ],
-                params_input = airflow_client.client.models.params_input.Params Input()
+                params_input = { }
             )
         else:
             return UpdateHITLDetailPayload(
diff --git a/version.txt b/version.txt
index 0aec50e..9cec716 100644
--- a/version.txt
+++ b/version.txt
@@ -1 +1 @@
-3.1.4
+3.1.6