This closes #2143
diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py
index 725d496..f280217 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_client.py
@@ -70,6 +70,33 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsDebugService, self).__init__(client)
+ self._method_configs = {
+ 'GetConfig': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.jobs.debug.getConfig',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/getConfig',
+ request_field=u'getDebugConfigRequest',
+ request_type_name=u'DataflowProjectsJobsDebugGetConfigRequest',
+ response_type_name=u'GetDebugConfigResponse',
+ supports_download=False,
+ ),
+ 'SendCapture': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.jobs.debug.sendCapture',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture',
+ request_field=u'sendDebugCaptureRequest',
+ request_type_name=u'DataflowProjectsJobsDebugSendCaptureRequest',
+ response_type_name=u'SendDebugCaptureResponse',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -86,19 +113,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- GetConfig.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.jobs.debug.getConfig',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/getConfig',
- request_field=u'getDebugConfigRequest',
- request_type_name=u'DataflowProjectsJobsDebugGetConfigRequest',
- response_type_name=u'GetDebugConfigResponse',
- supports_download=False,
- )
-
def SendCapture(self, request, global_params=None):
"""Send encoded debug capture data for component.
@@ -112,19 +126,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- SendCapture.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.jobs.debug.sendCapture',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/debug/sendCapture',
- request_field=u'sendDebugCaptureRequest',
- request_type_name=u'DataflowProjectsJobsDebugSendCaptureRequest',
- response_type_name=u'SendDebugCaptureResponse',
- supports_download=False,
- )
-
class ProjectsJobsMessagesService(base_api.BaseApiService):
"""Service class for the projects_jobs_messages resource."""
@@ -132,6 +133,21 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsMessagesService, self).__init__(client)
+ self._method_configs = {
+ 'List': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.jobs.messages.list',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[u'endTime', u'location', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/messages',
+ request_field='',
+ request_type_name=u'DataflowProjectsJobsMessagesListRequest',
+ response_type_name=u'ListJobMessagesResponse',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -148,19 +164,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- List.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.jobs.messages.list',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[u'endTime', u'location', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/messages',
- request_field='',
- request_type_name=u'DataflowProjectsJobsMessagesListRequest',
- response_type_name=u'ListJobMessagesResponse',
- supports_download=False,
- )
-
class ProjectsJobsWorkItemsService(base_api.BaseApiService):
"""Service class for the projects_jobs_workItems resource."""
@@ -168,6 +171,33 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsWorkItemsService, self).__init__(client)
+ self._method_configs = {
+ 'Lease': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.jobs.workItems.lease',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:lease',
+ request_field=u'leaseWorkItemRequest',
+ request_type_name=u'DataflowProjectsJobsWorkItemsLeaseRequest',
+ response_type_name=u'LeaseWorkItemResponse',
+ supports_download=False,
+ ),
+ 'ReportStatus': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.jobs.workItems.reportStatus',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus',
+ request_field=u'reportWorkItemStatusRequest',
+ request_type_name=u'DataflowProjectsJobsWorkItemsReportStatusRequest',
+ response_type_name=u'ReportWorkItemStatusResponse',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -184,19 +214,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Lease.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.jobs.workItems.lease',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:lease',
- request_field=u'leaseWorkItemRequest',
- request_type_name=u'DataflowProjectsJobsWorkItemsLeaseRequest',
- response_type_name=u'LeaseWorkItemResponse',
- supports_download=False,
- )
-
def ReportStatus(self, request, global_params=None):
"""Reports the status of dataflow WorkItems leased by a worker.
@@ -210,19 +227,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- ReportStatus.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.jobs.workItems.reportStatus',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/workItems:reportStatus',
- request_field=u'reportWorkItemStatusRequest',
- request_type_name=u'DataflowProjectsJobsWorkItemsReportStatusRequest',
- response_type_name=u'ReportWorkItemStatusResponse',
- supports_download=False,
- )
-
class ProjectsJobsService(base_api.BaseApiService):
"""Service class for the projects_jobs resource."""
@@ -230,6 +234,69 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsJobsService, self).__init__(client)
+ self._method_configs = {
+ 'Create': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.jobs.create',
+ ordered_params=[u'projectId'],
+ path_params=[u'projectId'],
+ query_params=[u'location', u'replaceJobId', u'view'],
+ relative_path=u'v1b3/projects/{projectId}/jobs',
+ request_field=u'job',
+ request_type_name=u'DataflowProjectsJobsCreateRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ 'Get': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.jobs.get',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[u'location', u'view'],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
+ request_field='',
+ request_type_name=u'DataflowProjectsJobsGetRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ 'GetMetrics': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.jobs.getMetrics',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[u'location', u'startTime'],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/metrics',
+ request_field='',
+ request_type_name=u'DataflowProjectsJobsGetMetricsRequest',
+ response_type_name=u'JobMetrics',
+ supports_download=False,
+ ),
+ 'List': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.jobs.list',
+ ordered_params=[u'projectId'],
+ path_params=[u'projectId'],
+ query_params=[u'filter', u'location', u'pageSize', u'pageToken', u'view'],
+ relative_path=u'v1b3/projects/{projectId}/jobs',
+ request_field='',
+ request_type_name=u'DataflowProjectsJobsListRequest',
+ response_type_name=u'ListJobsResponse',
+ supports_download=False,
+ ),
+ 'Update': base_api.ApiMethodInfo(
+ http_method=u'PUT',
+ method_id=u'dataflow.projects.jobs.update',
+ ordered_params=[u'projectId', u'jobId'],
+ path_params=[u'jobId', u'projectId'],
+ query_params=[u'location'],
+ relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
+ request_field=u'job',
+ request_type_name=u'DataflowProjectsJobsUpdateRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -246,19 +313,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Create.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.jobs.create',
- ordered_params=[u'projectId'],
- path_params=[u'projectId'],
- query_params=[u'location', u'replaceJobId', u'view'],
- relative_path=u'v1b3/projects/{projectId}/jobs',
- request_field=u'job',
- request_type_name=u'DataflowProjectsJobsCreateRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
def Get(self, request, global_params=None):
"""Gets the state of the specified Cloud Dataflow job.
@@ -272,19 +326,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Get.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.jobs.get',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[u'location', u'view'],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
- request_field='',
- request_type_name=u'DataflowProjectsJobsGetRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
def GetMetrics(self, request, global_params=None):
"""Request the job status.
@@ -298,19 +339,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- GetMetrics.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.jobs.getMetrics',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[u'location', u'startTime'],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}/metrics',
- request_field='',
- request_type_name=u'DataflowProjectsJobsGetMetricsRequest',
- response_type_name=u'JobMetrics',
- supports_download=False,
- )
-
def List(self, request, global_params=None):
"""List the jobs of a project.
@@ -324,19 +352,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- List.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.jobs.list',
- ordered_params=[u'projectId'],
- path_params=[u'projectId'],
- query_params=[u'filter', u'location', u'pageSize', u'pageToken', u'view'],
- relative_path=u'v1b3/projects/{projectId}/jobs',
- request_field='',
- request_type_name=u'DataflowProjectsJobsListRequest',
- response_type_name=u'ListJobsResponse',
- supports_download=False,
- )
-
def Update(self, request, global_params=None):
"""Updates the state of an existing Cloud Dataflow job.
@@ -350,19 +365,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Update.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'PUT',
- method_id=u'dataflow.projects.jobs.update',
- ordered_params=[u'projectId', u'jobId'],
- path_params=[u'jobId', u'projectId'],
- query_params=[u'location'],
- relative_path=u'v1b3/projects/{projectId}/jobs/{jobId}',
- request_field=u'job',
- request_type_name=u'DataflowProjectsJobsUpdateRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
class ProjectsLocationsJobsMessagesService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_messages resource."""
@@ -370,6 +372,21 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsMessagesService, self).__init__(client)
+ self._method_configs = {
+ 'List': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.locations.jobs.messages.list',
+ ordered_params=[u'projectId', u'location', u'jobId'],
+ path_params=[u'jobId', u'location', u'projectId'],
+ query_params=[u'endTime', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages',
+ request_field='',
+ request_type_name=u'DataflowProjectsLocationsJobsMessagesListRequest',
+ response_type_name=u'ListJobMessagesResponse',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -386,19 +403,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- List.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.locations.jobs.messages.list',
- ordered_params=[u'projectId', u'location', u'jobId'],
- path_params=[u'jobId', u'location', u'projectId'],
- query_params=[u'endTime', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages',
- request_field='',
- request_type_name=u'DataflowProjectsLocationsJobsMessagesListRequest',
- response_type_name=u'ListJobMessagesResponse',
- supports_download=False,
- )
-
class ProjectsLocationsJobsWorkItemsService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs_workItems resource."""
@@ -406,6 +410,33 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsWorkItemsService, self).__init__(client)
+ self._method_configs = {
+ 'Lease': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.locations.jobs.workItems.lease',
+ ordered_params=[u'projectId', u'location', u'jobId'],
+ path_params=[u'jobId', u'location', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease',
+ request_field=u'leaseWorkItemRequest',
+ request_type_name=u'DataflowProjectsLocationsJobsWorkItemsLeaseRequest',
+ response_type_name=u'LeaseWorkItemResponse',
+ supports_download=False,
+ ),
+ 'ReportStatus': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.locations.jobs.workItems.reportStatus',
+ ordered_params=[u'projectId', u'location', u'jobId'],
+ path_params=[u'jobId', u'location', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus',
+ request_field=u'reportWorkItemStatusRequest',
+ request_type_name=u'DataflowProjectsLocationsJobsWorkItemsReportStatusRequest',
+ response_type_name=u'ReportWorkItemStatusResponse',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -422,19 +453,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Lease.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.locations.jobs.workItems.lease',
- ordered_params=[u'projectId', u'location', u'jobId'],
- path_params=[u'jobId', u'location', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:lease',
- request_field=u'leaseWorkItemRequest',
- request_type_name=u'DataflowProjectsLocationsJobsWorkItemsLeaseRequest',
- response_type_name=u'LeaseWorkItemResponse',
- supports_download=False,
- )
-
def ReportStatus(self, request, global_params=None):
"""Reports the status of dataflow WorkItems leased by a worker.
@@ -448,19 +466,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- ReportStatus.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.locations.jobs.workItems.reportStatus',
- ordered_params=[u'projectId', u'location', u'jobId'],
- path_params=[u'jobId', u'location', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/workItems:reportStatus',
- request_field=u'reportWorkItemStatusRequest',
- request_type_name=u'DataflowProjectsLocationsJobsWorkItemsReportStatusRequest',
- response_type_name=u'ReportWorkItemStatusResponse',
- supports_download=False,
- )
-
class ProjectsLocationsJobsService(base_api.BaseApiService):
"""Service class for the projects_locations_jobs resource."""
@@ -468,6 +473,69 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsJobsService, self).__init__(client)
+ self._method_configs = {
+ 'Create': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.locations.jobs.create',
+ ordered_params=[u'projectId', u'location'],
+ path_params=[u'location', u'projectId'],
+ query_params=[u'replaceJobId', u'view'],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
+ request_field=u'job',
+ request_type_name=u'DataflowProjectsLocationsJobsCreateRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ 'Get': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.locations.jobs.get',
+ ordered_params=[u'projectId', u'location', u'jobId'],
+ path_params=[u'jobId', u'location', u'projectId'],
+ query_params=[u'view'],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
+ request_field='',
+ request_type_name=u'DataflowProjectsLocationsJobsGetRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ 'GetMetrics': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.locations.jobs.getMetrics',
+ ordered_params=[u'projectId', u'location', u'jobId'],
+ path_params=[u'jobId', u'location', u'projectId'],
+ query_params=[u'startTime'],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics',
+ request_field='',
+ request_type_name=u'DataflowProjectsLocationsJobsGetMetricsRequest',
+ response_type_name=u'JobMetrics',
+ supports_download=False,
+ ),
+ 'List': base_api.ApiMethodInfo(
+ http_method=u'GET',
+ method_id=u'dataflow.projects.locations.jobs.list',
+ ordered_params=[u'projectId', u'location'],
+ path_params=[u'location', u'projectId'],
+ query_params=[u'filter', u'pageSize', u'pageToken', u'view'],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
+ request_field='',
+ request_type_name=u'DataflowProjectsLocationsJobsListRequest',
+ response_type_name=u'ListJobsResponse',
+ supports_download=False,
+ ),
+ 'Update': base_api.ApiMethodInfo(
+ http_method=u'PUT',
+ method_id=u'dataflow.projects.locations.jobs.update',
+ ordered_params=[u'projectId', u'location', u'jobId'],
+ path_params=[u'jobId', u'location', u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
+ request_field=u'job',
+ request_type_name=u'DataflowProjectsLocationsJobsUpdateRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -484,19 +552,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Create.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.locations.jobs.create',
- ordered_params=[u'projectId', u'location'],
- path_params=[u'location', u'projectId'],
- query_params=[u'replaceJobId', u'view'],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
- request_field=u'job',
- request_type_name=u'DataflowProjectsLocationsJobsCreateRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
def Get(self, request, global_params=None):
"""Gets the state of the specified Cloud Dataflow job.
@@ -510,19 +565,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Get.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.locations.jobs.get',
- ordered_params=[u'projectId', u'location', u'jobId'],
- path_params=[u'jobId', u'location', u'projectId'],
- query_params=[u'view'],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
- request_field='',
- request_type_name=u'DataflowProjectsLocationsJobsGetRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
def GetMetrics(self, request, global_params=None):
"""Request the job status.
@@ -536,19 +578,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- GetMetrics.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.locations.jobs.getMetrics',
- ordered_params=[u'projectId', u'location', u'jobId'],
- path_params=[u'jobId', u'location', u'projectId'],
- query_params=[u'startTime'],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/metrics',
- request_field='',
- request_type_name=u'DataflowProjectsLocationsJobsGetMetricsRequest',
- response_type_name=u'JobMetrics',
- supports_download=False,
- )
-
def List(self, request, global_params=None):
"""List the jobs of a project.
@@ -562,19 +591,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- List.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'GET',
- method_id=u'dataflow.projects.locations.jobs.list',
- ordered_params=[u'projectId', u'location'],
- path_params=[u'location', u'projectId'],
- query_params=[u'filter', u'pageSize', u'pageToken', u'view'],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs',
- request_field='',
- request_type_name=u'DataflowProjectsLocationsJobsListRequest',
- response_type_name=u'ListJobsResponse',
- supports_download=False,
- )
-
def Update(self, request, global_params=None):
"""Updates the state of an existing Cloud Dataflow job.
@@ -588,19 +604,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Update.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'PUT',
- method_id=u'dataflow.projects.locations.jobs.update',
- ordered_params=[u'projectId', u'location', u'jobId'],
- path_params=[u'jobId', u'location', u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}',
- request_field=u'job',
- request_type_name=u'DataflowProjectsLocationsJobsUpdateRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
class ProjectsLocationsService(base_api.BaseApiService):
"""Service class for the projects_locations resource."""
@@ -608,6 +611,9 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsLocationsService, self).__init__(client)
+ self._method_configs = {
+ }
+
self._upload_configs = {
}
@@ -618,6 +624,21 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsTemplatesService, self).__init__(client)
+ self._method_configs = {
+ 'Create': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.templates.create',
+ ordered_params=[u'projectId'],
+ path_params=[u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/templates',
+ request_field=u'createJobFromTemplateRequest',
+ request_type_name=u'DataflowProjectsTemplatesCreateRequest',
+ response_type_name=u'Job',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -634,19 +655,6 @@
return self._RunMethod(
config, request, global_params=global_params)
- Create.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.templates.create',
- ordered_params=[u'projectId'],
- path_params=[u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/templates',
- request_field=u'createJobFromTemplateRequest',
- request_type_name=u'DataflowProjectsTemplatesCreateRequest',
- response_type_name=u'Job',
- supports_download=False,
- )
-
class ProjectsService(base_api.BaseApiService):
"""Service class for the projects resource."""
@@ -654,6 +662,21 @@
def __init__(self, client):
super(DataflowV1b3.ProjectsService, self).__init__(client)
+ self._method_configs = {
+ 'WorkerMessages': base_api.ApiMethodInfo(
+ http_method=u'POST',
+ method_id=u'dataflow.projects.workerMessages',
+ ordered_params=[u'projectId'],
+ path_params=[u'projectId'],
+ query_params=[],
+ relative_path=u'v1b3/projects/{projectId}/WorkerMessages',
+ request_field=u'sendWorkerMessagesRequest',
+ request_type_name=u'DataflowProjectsWorkerMessagesRequest',
+ response_type_name=u'SendWorkerMessagesResponse',
+ supports_download=False,
+ ),
+ }
+
self._upload_configs = {
}
@@ -669,16 +692,3 @@
config = self.GetMethodConfig('WorkerMessages')
return self._RunMethod(
config, request, global_params=global_params)
-
- WorkerMessages.method_config = lambda: base_api.ApiMethodInfo(
- http_method=u'POST',
- method_id=u'dataflow.projects.workerMessages',
- ordered_params=[u'projectId'],
- path_params=[u'projectId'],
- query_params=[],
- relative_path=u'v1b3/projects/{projectId}/WorkerMessages',
- request_field=u'sendWorkerMessagesRequest',
- request_type_name=u'DataflowProjectsWorkerMessagesRequest',
- response_type_name=u'SendWorkerMessagesResponse',
- supports_download=False,
- )
diff --git a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py
index a42154e..4f1ccdb 100644
--- a/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py
+++ b/sdks/python/apache_beam/runners/dataflow/internal/clients/dataflow/dataflow_v1b3_messages.py
@@ -129,6 +129,55 @@
maxNumWorkers = _messages.IntegerField(2, variant=_messages.Variant.INT32)
+class CPUTime(_messages.Message):
+ """Modeled after information exposed by /proc/stat.
+
+ Fields:
+ rate: Average CPU utilization rate (% non-idle cpu / second) since
+ previous sample.
+ timestamp: Timestamp of the measurement.
+ totalMs: Total active CPU time across all cores (ie., non-idle) in
+ milliseconds since start-up.
+ """
+
+ rate = _messages.FloatField(1)
+ timestamp = _messages.StringField(2)
+ totalMs = _messages.IntegerField(3, variant=_messages.Variant.UINT64)
+
+
+class ComponentSource(_messages.Message):
+ """Description of an interstitial value between transforms in an execution
+ stage.
+
+ Fields:
+ name: Dataflow service generated name for this source.
+ originalTransformOrCollection: User name for the original user transform
+ or collection with which this source is most closely associated.
+ userName: Human-readable name for this transform; may be user or system
+ generated.
+ """
+
+ name = _messages.StringField(1)
+ originalTransformOrCollection = _messages.StringField(2)
+ userName = _messages.StringField(3)
+
+
+class ComponentTransform(_messages.Message):
+ """Description of a transform executed as part of an execution stage.
+
+ Fields:
+ name: Dataflow service generated name for this source.
+ originalTransform: User name for the original user transform with which
+ this transform is most closely associated.
+ userName: Human-readable name for this transform; may be user or system
+ generated.
+ """
+
+ name = _messages.StringField(1)
+ originalTransform = _messages.StringField(2)
+ userName = _messages.StringField(3)
+
+
class ComputationTopology(_messages.Message):
"""All configuration data for a particular Computation.
@@ -239,8 +288,8 @@
structured names are the same get merged into a single value for the job.
Enums:
+ OriginValueValuesEnum: One of the standard Origins defined above.
PortionValueValuesEnum: Portion of this counter, either key or value.
- StandardOriginValueValuesEnum: One of the standard Origins defined above.
Fields:
componentStepName: Name of the optimized step being executed by the
@@ -249,14 +298,25 @@
component steps.
name: Counter name. Not necessarily globally-unique, but unique within the
context of the other fields. Required.
+ origin: One of the standard Origins defined above.
+ originNamespace: A string containing a more specific namespace of the
+ counter's origin.
originalStepName: System generated name of the original step in the user's
graph, before optimization.
- otherOrigin: A string containing the origin of the counter.
portion: Portion of this counter, either key or value.
- standardOrigin: One of the standard Origins defined above.
workerId: ID of a particular worker.
"""
+ class OriginValueValuesEnum(_messages.Enum):
+ """One of the standard Origins defined above.
+
+ Values:
+ SYSTEM: Counter was created by the Dataflow system.
+ USER: Counter was created by the user.
+ """
+ SYSTEM = 0
+ USER = 1
+
class PortionValueValuesEnum(_messages.Enum):
"""Portion of this counter, either key or value.
@@ -269,23 +329,13 @@
KEY = 1
VALUE = 2
- class StandardOriginValueValuesEnum(_messages.Enum):
- """One of the standard Origins defined above.
-
- Values:
- DATAFLOW: Counter was created by the Dataflow system.
- USER: Counter was created by the user.
- """
- DATAFLOW = 0
- USER = 1
-
componentStepName = _messages.StringField(1)
executionStepName = _messages.StringField(2)
name = _messages.StringField(3)
- originalStepName = _messages.StringField(4)
- otherOrigin = _messages.StringField(5)
- portion = _messages.EnumField('PortionValueValuesEnum', 6)
- standardOrigin = _messages.EnumField('StandardOriginValueValuesEnum', 7)
+ origin = _messages.EnumField('OriginValueValuesEnum', 4)
+ originNamespace = _messages.StringField(5)
+ originalStepName = _messages.StringField(6)
+ portion = _messages.EnumField('PortionValueValuesEnum', 7)
workerId = _messages.StringField(8)
@@ -437,10 +487,12 @@
JOB_VIEW_UNKNOWN: <no description>
JOB_VIEW_SUMMARY: <no description>
JOB_VIEW_ALL: <no description>
+ JOB_VIEW_DESCRIPTION: <no description>
"""
JOB_VIEW_UNKNOWN = 0
JOB_VIEW_SUMMARY = 1
JOB_VIEW_ALL = 2
+ JOB_VIEW_DESCRIPTION = 3
job = _messages.MessageField('Job', 1)
location = _messages.StringField(2)
@@ -516,10 +568,12 @@
JOB_VIEW_UNKNOWN: <no description>
JOB_VIEW_SUMMARY: <no description>
JOB_VIEW_ALL: <no description>
+ JOB_VIEW_DESCRIPTION: <no description>
"""
JOB_VIEW_UNKNOWN = 0
JOB_VIEW_SUMMARY = 1
JOB_VIEW_ALL = 2
+ JOB_VIEW_DESCRIPTION = 3
jobId = _messages.StringField(1, required=True)
location = _messages.StringField(2)
@@ -570,10 +624,12 @@
JOB_VIEW_UNKNOWN: <no description>
JOB_VIEW_SUMMARY: <no description>
JOB_VIEW_ALL: <no description>
+ JOB_VIEW_DESCRIPTION: <no description>
"""
JOB_VIEW_UNKNOWN = 0
JOB_VIEW_SUMMARY = 1
JOB_VIEW_ALL = 2
+ JOB_VIEW_DESCRIPTION = 3
filter = _messages.EnumField('FilterValueValuesEnum', 1)
location = _messages.StringField(2)
@@ -703,10 +759,12 @@
JOB_VIEW_UNKNOWN: <no description>
JOB_VIEW_SUMMARY: <no description>
JOB_VIEW_ALL: <no description>
+ JOB_VIEW_DESCRIPTION: <no description>
"""
JOB_VIEW_UNKNOWN = 0
JOB_VIEW_SUMMARY = 1
JOB_VIEW_ALL = 2
+ JOB_VIEW_DESCRIPTION = 3
job = _messages.MessageField('Job', 1)
location = _messages.StringField(2, required=True)
@@ -752,10 +810,12 @@
JOB_VIEW_UNKNOWN: <no description>
JOB_VIEW_SUMMARY: <no description>
JOB_VIEW_ALL: <no description>
+ JOB_VIEW_DESCRIPTION: <no description>
"""
JOB_VIEW_UNKNOWN = 0
JOB_VIEW_SUMMARY = 1
JOB_VIEW_ALL = 2
+ JOB_VIEW_DESCRIPTION = 3
jobId = _messages.StringField(1, required=True)
location = _messages.StringField(2, required=True)
@@ -806,10 +866,12 @@
JOB_VIEW_UNKNOWN: <no description>
JOB_VIEW_SUMMARY: <no description>
JOB_VIEW_ALL: <no description>
+ JOB_VIEW_DESCRIPTION: <no description>
"""
JOB_VIEW_UNKNOWN = 0
JOB_VIEW_SUMMARY = 1
JOB_VIEW_ALL = 2
+ JOB_VIEW_DESCRIPTION = 3
filter = _messages.EnumField('FilterValueValuesEnum', 1)
location = _messages.StringField(2, required=True)
@@ -1014,6 +1076,46 @@
sizeGb = _messages.IntegerField(3, variant=_messages.Variant.INT32)
+class DisplayData(_messages.Message):
+ """Data provided with a pipeline or transform to provide descriptive info.
+
+ Fields:
+ boolValue: Contains value if the data is of a boolean type.
+ durationValue: Contains value if the data is of duration type.
+ floatValue: Contains value if the data is of float type.
+ int64Value: Contains value if the data is of int64 type.
+ javaClassValue: Contains value if the data is of java class type.
+ key: The key identifying the display data. This is intended to be used as
+ a label for the display data when viewed in a dax monitoring system.
+ label: An optional label to display in a dax UI for the element.
+ namespace: The namespace for the key. This is usually a class name or
+ programming language namespace (i.e. python module) which defines the
+ display data. This allows a dax monitoring system to specially handle
+ the data and perform custom rendering.
+ shortStrValue: A possible additional shorter value to display. For example
+ a java_class_name_value of com.mypackage.MyDoFn will be stored with
+ MyDoFn as the short_str_value and com.mypackage.MyDoFn as the
+ java_class_name value. short_str_value can be displayed and
+ java_class_name_value will be displayed as a tooltip.
+ strValue: Contains value if the data is of string type.
+ timestampValue: Contains value if the data is of timestamp type.
+ url: An optional full URL.
+ """
+
+ boolValue = _messages.BooleanField(1)
+ durationValue = _messages.StringField(2)
+ floatValue = _messages.FloatField(3, variant=_messages.Variant.FLOAT)
+ int64Value = _messages.IntegerField(4)
+ javaClassValue = _messages.StringField(5)
+ key = _messages.StringField(6)
+ label = _messages.StringField(7)
+ namespace = _messages.StringField(8)
+ shortStrValue = _messages.StringField(9)
+ strValue = _messages.StringField(10)
+ timestampValue = _messages.StringField(11)
+ url = _messages.StringField(12)
+
+
class DistributionUpdate(_messages.Message):
"""A metric value representing a distribution.
@@ -1210,6 +1312,59 @@
workerPools = _messages.MessageField('WorkerPool', 10, repeated=True)
+class ExecutionStageSummary(_messages.Message):
+ """Description of the composing transforms, names/ids, and input/outputs of
+ a stage of execution. Some composing transforms and sources may have been
+ generated by the Dataflow service during execution planning.
+
+ Enums:
+ KindValueValuesEnum: Type of transform this stage is executing.
+
+ Fields:
+ componentSource: Collections produced and consumed by component transforms
+ of this stage.
+ componentTransform: Transforms that comprise this execution stage.
+ id: Dataflow service generated id for this stage.
+ inputSource: Input sources for this stage.
+ kind: Type of transform this stage is executing.
+ name: Dataflow service generated name for this stage.
+ outputSource: Output sources for this stage.
+ """
+
+ class KindValueValuesEnum(_messages.Enum):
+ """Type of transform this stage is executing.
+
+ Values:
+ UNKNOWN_KIND: Unrecognized transform type.
+ PAR_DO_KIND: ParDo transform.
+ GROUP_BY_KEY_KIND: Group By Key transform.
+ FLATTEN_KIND: Flatten transform.
+ READ_KIND: Read transform.
+ WRITE_KIND: Write transform.
+ CONSTANT_KIND: Constructs from a constant value, such as with Create.of.
+ SINGLETON_KIND: Creates a Singleton view of a collection.
+ SHUFFLE_KIND: Opening or closing a shuffle session, often as part of a
+ GroupByKey.
+ """
+ UNKNOWN_KIND = 0
+ PAR_DO_KIND = 1
+ GROUP_BY_KEY_KIND = 2
+ FLATTEN_KIND = 3
+ READ_KIND = 4
+ WRITE_KIND = 5
+ CONSTANT_KIND = 6
+ SINGLETON_KIND = 7
+ SHUFFLE_KIND = 8
+
+ componentSource = _messages.MessageField('ComponentSource', 1, repeated=True)
+ componentTransform = _messages.MessageField('ComponentTransform', 2, repeated=True)
+ id = _messages.IntegerField(3)
+ inputSource = _messages.MessageField('StageSource', 4, repeated=True)
+ kind = _messages.EnumField('KindValueValuesEnum', 5)
+ name = _messages.StringField(6)
+ outputSource = _messages.MessageField('StageSource', 7, repeated=True)
+
+
class FailedLocation(_messages.Message):
"""Indicates which location failed to respond to a request for data.
@@ -1412,8 +1567,7 @@
callers cannot mutate it.
currentStateTime: The timestamp associated with the current state.
environment: The environment for the job.
- executionInfo: Information about how the Cloud Dataflow service will run
- the job.
+ executionInfo: Deprecated.
id: The unique ID of this job. This field is set by the Cloud Dataflow
service when the Job is created, and is immutable for the life of the
job.
@@ -1429,6 +1583,10 @@
attempts to create a Job with the same name as an already-existing Job,
the attempt returns the existing Job. The name must match the regular
expression `[a-z]([-a-z0-9]{0,38}[a-z0-9])?`
+ pipelineDescription: Preliminary field: The format of this data may change
+ at any time. A description of the user pipeline and stages through which
+ it is executed. Created by Cloud Dataflow service. Only retrieved with
+ JOB_VIEW_DESCRIPTION or JOB_VIEW_ALL.
projectId: The ID of the Cloud Platform project that the job belongs to.
replaceJobId: If this job is an update of an existing job, this field is
the job ID of the job it replaced. When sending a `CreateJobRequest`,
@@ -1643,14 +1801,15 @@
labels = _messages.MessageField('LabelsValue', 8)
location = _messages.StringField(9)
name = _messages.StringField(10)
- projectId = _messages.StringField(11)
- replaceJobId = _messages.StringField(12)
- replacedByJobId = _messages.StringField(13)
- requestedState = _messages.EnumField('RequestedStateValueValuesEnum', 14)
- steps = _messages.MessageField('Step', 15, repeated=True)
- tempFiles = _messages.StringField(16, repeated=True)
- transformNameMapping = _messages.MessageField('TransformNameMappingValue', 17)
- type = _messages.EnumField('TypeValueValuesEnum', 18)
+ pipelineDescription = _messages.MessageField('PipelineDescription', 11)
+ projectId = _messages.StringField(12)
+ replaceJobId = _messages.StringField(13)
+ replacedByJobId = _messages.StringField(14)
+ requestedState = _messages.EnumField('RequestedStateValueValuesEnum', 15)
+ steps = _messages.MessageField('Step', 16, repeated=True)
+ tempFiles = _messages.StringField(17, repeated=True)
+ transformNameMapping = _messages.MessageField('TransformNameMappingValue', 18)
+ type = _messages.EnumField('TypeValueValuesEnum', 19)
class JobExecutionInfo(_messages.Message):
@@ -2259,6 +2418,24 @@
valueCombiningFn = _messages.MessageField('ValueCombiningFnValue', 6)
+class PipelineDescription(_messages.Message):
+ """A descriptive representation of submitted pipeline as well as the
+ executed form. This data is provided by the Dataflow service for ease of
+ visualizing the pipeline and interpreting Dataflow provided metrics.
+
+ Fields:
+ displayData: Pipeline level display data.
+ executionPipelineStage: Description of each stage of execution of the
+ pipeline.
+ originalPipelineTransform: Description of each transform in the pipeline
+ and collections between them.
+ """
+
+ displayData = _messages.MessageField('DisplayData', 1, repeated=True)
+ executionPipelineStage = _messages.MessageField('ExecutionStageSummary', 2, repeated=True)
+ originalPipelineTransform = _messages.MessageField('TransformSummary', 3, repeated=True)
+
+
class Position(_messages.Message):
"""Position defines a position within a collection of data. The value can
be either the end position, a key (used with ordered collections), a byte
@@ -2377,43 +2554,13 @@
class ResourceUtilizationReport(_messages.Message):
"""Worker metrics exported from workers. This contains resource utilization
metrics accumulated from a variety of sources. For more information, see go
- /df-resource-signals. Note that this proto closely follows the structure of
- its DFE siblings in its contents.
-
- Messages:
- MetricsValueListEntry: A MetricsValueListEntry object.
+ /df-resource-signals.
Fields:
- metrics: Each Struct must parallel DFE worker metrics protos (eg.,
- cpu_time metric will have nested values \u201ctimestamp_ms, total_ms, rate\u201d).
+ cpuTime: CPU utilization samples.
"""
- @encoding.MapUnrecognizedFields('additionalProperties')
- class MetricsValueListEntry(_messages.Message):
- """A MetricsValueListEntry object.
-
- Messages:
- AdditionalProperty: An additional property for a MetricsValueListEntry
- object.
-
- Fields:
- additionalProperties: Properties of the object.
- """
-
- class AdditionalProperty(_messages.Message):
- """An additional property for a MetricsValueListEntry object.
-
- Fields:
- key: Name of the additional property.
- value: A extra_types.JsonValue attribute.
- """
-
- key = _messages.StringField(1)
- value = _messages.MessageField('extra_types.JsonValue', 2)
-
- additionalProperties = _messages.MessageField('AdditionalProperty', 1, repeated=True)
-
- metrics = _messages.MessageField('MetricsValueListEntry', 1, repeated=True)
+ cpuTime = _messages.MessageField('CPUTime', 1, repeated=True)
class ResourceUtilizationReportResponse(_messages.Message):
@@ -2996,6 +3143,25 @@
lowBits = _messages.IntegerField(2, variant=_messages.Variant.UINT32)
+class StageSource(_messages.Message):
+ """Description of an input or output of an execution stage.
+
+ Fields:
+ name: Dataflow service generated name for this source.
+ originalUserTransformOrCollection: User name for the original user
+ transform or collection with which this source is most closely
+ associated.
+ sizeBytes: Size of the source, if measurable.
+ userName: Human-readable name for this source; may be user or system
+ generated.
+ """
+
+ name = _messages.StringField(1)
+ originalUserTransformOrCollection = _messages.StringField(2)
+ sizeBytes = _messages.IntegerField(3)
+ userName = _messages.StringField(4)
+
+
class StandardQueryParameters(_messages.Message):
"""Query parameters accepted by all methods.
@@ -3169,20 +3335,23 @@
Messages:
PropertiesValue: Named properties associated with the step. Each kind of
- predefined step has its own required set of properties.
+ predefined step has its own required set of properties. Must be provided
+ on Create. Only retrieved with JOB_VIEW_ALL.
Fields:
kind: The kind of step in the Cloud Dataflow job.
name: The name that identifies the step. This must be unique for each step
with respect to all other steps in the Cloud Dataflow job.
properties: Named properties associated with the step. Each kind of
- predefined step has its own required set of properties.
+ predefined step has its own required set of properties. Must be provided
+ on Create. Only retrieved with JOB_VIEW_ALL.
"""
@encoding.MapUnrecognizedFields('additionalProperties')
class PropertiesValue(_messages.Message):
"""Named properties associated with the step. Each kind of predefined step
- has its own required set of properties.
+ has its own required set of properties. Must be provided on Create. Only
+ retrieved with JOB_VIEW_ALL.
Messages:
AdditionalProperty: An additional property for a PropertiesValue object.
@@ -3502,6 +3671,56 @@
userStageToComputationNameMap = _messages.MessageField('UserStageToComputationNameMapValue', 5)
+class TransformSummary(_messages.Message):
+ """Description of the type, names/ids, and input/outputs for a transform.
+
+ Enums:
+ KindValueValuesEnum: Type of transform.
+
+ Fields:
+ displayData: Transform-specific display data.
+ id: SDK generated id of this transform instance.
+ inputCollectionName: User names for all collection inputs to this
+ transform.
+ kind: Type of transform.
+ name: User provided name for this transform instance.
+ outputCollectionName: User names for all collection outputs to this
+ transform.
+ """
+
+ class KindValueValuesEnum(_messages.Enum):
+ """Type of transform.
+
+ Values:
+ UNKNOWN_KIND: Unrecognized transform type.
+ PAR_DO_KIND: ParDo transform.
+ GROUP_BY_KEY_KIND: Group By Key transform.
+ FLATTEN_KIND: Flatten transform.
+ READ_KIND: Read transform.
+ WRITE_KIND: Write transform.
+ CONSTANT_KIND: Constructs from a constant value, such as with Create.of.
+ SINGLETON_KIND: Creates a Singleton view of a collection.
+ SHUFFLE_KIND: Opening or closing a shuffle session, often as part of a
+ GroupByKey.
+ """
+ UNKNOWN_KIND = 0
+ PAR_DO_KIND = 1
+ GROUP_BY_KEY_KIND = 2
+ FLATTEN_KIND = 3
+ READ_KIND = 4
+ WRITE_KIND = 5
+ CONSTANT_KIND = 6
+ SINGLETON_KIND = 7
+ SHUFFLE_KIND = 8
+
+ displayData = _messages.MessageField('DisplayData', 1, repeated=True)
+ id = _messages.StringField(2)
+ inputCollectionName = _messages.StringField(3, repeated=True)
+ kind = _messages.EnumField('KindValueValuesEnum', 4)
+ name = _messages.StringField(5)
+ outputCollectionName = _messages.StringField(6, repeated=True)
+
+
class WorkItem(_messages.Message):
"""WorkItem represents basic information about a WorkItem to be executed in
the cloud.