blob: 1438d95fdd076a1ddd547b2004468b317a24f2dc [file] [log] [blame]
// DO NOT EDIT.
// swift-format-ignore-file
//
// Generated by the Swift generator plugin for the protocol buffer compiler.
// Source: org/apache/beam/model/pipeline/v1/beam_runner_api.proto
//
// For information on using the generated types, please see the documentation:
// https://github.com/apple/swift-protobuf/
//
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Protocol Buffers describing the Runner API, which is the runner-independent,
// SDK-independent definition of the Beam model.
import Foundation
import SwiftProtobuf
// If the compiler emits an error on this type, it is because this file
// was generated by a version of the `protoc` Swift plug-in that is
// incompatible with the version of SwiftProtobuf to which you are linking.
// Please ensure that you are building against the same version of the API
// that was used to generate this file.
fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck {
  // Conforming to `ProtobufAPIVersion_2` pins this file to major API version 2
  // of the SwiftProtobuf runtime.
  struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {}
  // `ProtobufAPIVersionCheck` requires a `Version` typealias; if the linked
  // SwiftProtobuf runtime does not provide these protocols, this fails to
  // compile, signalling a generator/runtime version mismatch.
  typealias Version = _2
}
/// Message wrapper for the `Constants` enum; carries no fields of its own.
struct Org_Apache_Beam_Model_Pipeline_V1_BeamConstants {
  // SwiftProtobuf.Message conformance is attached in a separate extension; see
  // the `Message` and `Message+*Additions` files in the SwiftProtobuf library
  // for the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// All timestamps in milliseconds since Jan 1, 1970.
  enum Constants: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// All timestamps of elements or window boundaries must be within
    /// the interval [MIN_TIMESTAMP_MILLIS, MAX_TIMESTAMP_MILLIS].
    /// The smallest representable timestamp of an element or a window boundary.
    case minTimestampMillis // = 0

    /// The largest representable timestamp of an element or a window boundary.
    case maxTimestampMillis // = 1

    /// The maximum timestamp for the global window.
    /// Triggers use the max timestamp to set timers' timestamps. Timers fire
    /// when the watermark passes their timestamps, so this value needs to be
    /// smaller than MAX_TIMESTAMP_MILLIS. One standard day is subtracted from
    /// MAX_TIMESTAMP_MILLIS so the bound still holds even after rounding up to
    /// seconds or minutes.
    case globalWindowMaxTimestampMillis // = 2

    /// Carries any raw value produced by a newer version of the schema.
    case UNRECOGNIZED(Int)

    init() {
      self = .minTimestampMillis
    }

    var rawValue: Int {
      switch self {
      case .minTimestampMillis:
        return 0
      case .maxTimestampMillis:
        return 1
      case .globalWindowMaxTimestampMillis:
        return 2
      case .UNRECOGNIZED(let value):
        return value
      }
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .minTimestampMillis
      case 1:
        self = .maxTimestampMillis
      case 2:
        self = .globalWindowMaxTimestampMillis
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_BeamConstants.Constants: CaseIterable {
  // Hand-written because the compiler cannot synthesize `allCases` for an enum
  // with the associated-value `UNRECOGNIZED` case.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_BeamConstants.Constants] = [.minTimestampMillis, .maxTimestampMillis, .globalWindowMaxTimestampMillis]
}

#endif  // swift(>=4.2)
/// A set of mappings from id to message. This is included as an optional field
/// on any proto message that may contain references needing resolution.
struct Org_Apache_Beam_Model_Pipeline_V1_Components {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) A map from pipeline-scoped id to PTransform.
  ///
  /// Keys of the transforms map may be used by runners to identify pipeline
  /// steps. Hence it's recommended to use strings that are not too long that
  /// match regex '[A-Za-z0-9-_]+'.
  var transforms: [String: Org_Apache_Beam_Model_Pipeline_V1_PTransform] = [:]

  /// (Required) A map from pipeline-scoped id to PCollection.
  var pcollections: [String: Org_Apache_Beam_Model_Pipeline_V1_PCollection] = [:]

  /// (Required) A map from pipeline-scoped id to WindowingStrategy.
  var windowingStrategies: [String: Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy] = [:]

  /// (Required) A map from pipeline-scoped id to Coder.
  var coders: [String: Org_Apache_Beam_Model_Pipeline_V1_Coder] = [:]

  /// (Required) A map from pipeline-scoped id to Environment.
  var environments: [String: Org_Apache_Beam_Model_Pipeline_V1_Environment] = [:]

  // Fields received on the wire that this schema does not know about.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// A Pipeline is a hierarchical graph of PTransforms, linked
/// by PCollections. A typical graph may look like:
///
/// Impulse -> PCollection -> ParDo -> PCollection -> GroupByKey -> ...
/// \> PCollection -> ParDo -> ...
/// \> ParDo -> ...
/// Impulse -> PCollection -> ParDo -> PCollection -> ...
///
/// This is represented by a number of by-reference maps to transforms,
/// PCollections, SDK environments, coders, etc., for
/// supporting compact reuse and arbitrary graph structure.
struct Org_Apache_Beam_Model_Pipeline_V1_Pipeline {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Backing storage for `components`; nil until explicitly set.
  fileprivate var _components: Org_Apache_Beam_Model_Pipeline_V1_Components? = nil

  /// (Required) The coders, UDFs, graph nodes, etc, that make up
  /// this pipeline.
  var components: Org_Apache_Beam_Model_Pipeline_V1_Components {
    get { return _components ?? Org_Apache_Beam_Model_Pipeline_V1_Components() }
    set { _components = newValue }
  }

  /// Returns true if `components` has been explicitly set.
  var hasComponents: Bool { return _components != nil }

  /// Clears the value of `components`. Subsequent reads from it will return its default value.
  mutating func clearComponents() { _components = nil }

  /// (Required) The ids of all PTransforms that are not contained within another
  /// PTransform. These must be in shallow topological order, so that traversing
  /// them recursively in this order yields a recursively topological traversal.
  var rootTransformIds: [String] = []

  /// (Optional) Static display data for the pipeline. If there is none,
  /// it may be omitted.
  var displayData: [Org_Apache_Beam_Model_Pipeline_V1_DisplayData] = []

  /// (Optional) A set of requirements that the runner MUST understand and be
  /// able to faithfully provide in order to execute this pipeline. These
  /// may indicate that a runner must inspect new fields on a component or
  /// provide additional guarantees when processing specific transforms.
  /// A runner should reject any pipelines with unknown requirements.
  var requirements: [String] = []

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Transforms are the operations in your pipeline, and provide a generic
/// processing framework. You provide processing logic in the form of a function
/// object (colloquially referred to as “user code”), and your user code is
/// applied to each element of an input PCollection (or more than one
/// PCollection). Depending on the pipeline runner and back-end that you choose,
/// many different workers across a cluster may execute instances of your user
/// code in parallel. The user code running on each worker generates the output
/// elements that are ultimately added to the final output PCollection that the
/// transform produces.
///
/// The Beam SDKs contain a number of different transforms that you can apply to
/// your pipeline’s PCollections. These include general-purpose core transforms,
/// such as ParDo or Combine. There are also pre-written composite transforms
/// included in the SDKs, which combine one or more of the core transforms in a
/// useful processing pattern, such as counting or combining elements in a
/// collection. You can also define your own more complex composite transforms to
/// fit your pipeline’s exact use case.
struct Org_Apache_Beam_Model_Pipeline_V1_PTransform {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) A unique name for the application node.
  ///
  /// Ideally, this should be stable over multiple evolutions of a pipeline
  /// for the purposes of logging and associating pipeline state with a node,
  /// etc.
  ///
  /// If it is not stable, then the runner decides what will happen. But, most
  /// importantly, it must always be here and be unique, even if it is
  /// autogenerated.
  var uniqueName: String = ""

  /// (Optional) A URN and payload that, together, fully define the semantics
  /// of this transform.
  ///
  /// If absent, this must be an "anonymous" composite transform.
  ///
  /// For primitive transform in the Runner API, this is required, and the
  /// payloads are well-defined messages. When the URN indicates ParDo it
  /// is a ParDoPayload, and so on. For some special composite transforms,
  /// the payload is also officially defined. See StandardPTransforms for
  /// details.
  var spec: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {return _spec ?? Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()}
    set {_spec = newValue}
  }
  /// Returns true if `spec` has been explicitly set.
  var hasSpec: Bool {return self._spec != nil}
  /// Clears the value of `spec`. Subsequent reads from it will return its default value.
  mutating func clearSpec() {self._spec = nil}

  /// (Optional) A list of the ids of transforms that it contains.
  ///
  /// Primitive transforms (see StandardPTransforms.Primitives) are not allowed
  /// to specify subtransforms.
  ///
  /// Note that a composite transform may have zero subtransforms as long as it
  /// only outputs PCollections that are in its inputs.
  var subtransforms: [String] = []

  /// (Required) A map from local names of inputs (unique only within this map,
  /// and likely embedded in the transform payload and serialized user code) to
  /// PCollection ids.
  ///
  /// The payload for this transform may clarify the relationship of these
  /// inputs. For example:
  ///
  /// - for a Flatten transform they are merged
  /// - for a ParDo transform, some may be side inputs
  ///
  /// All inputs are recorded here so that the topological ordering of
  /// the graph is consistent whether or not the payload is understood.
  var inputs: [String: String] = [:]

  /// (Required) A map from local names of outputs (unique only within this map,
  /// and likely embedded in the transform payload and serialized user code)
  /// to PCollection ids.
  ///
  /// The URN or payload for this transform node may clarify the type and
  /// relationship of these outputs. For example:
  ///
  /// - for a ParDo transform, these are tags on PCollections, which will be
  /// embedded in the DoFn.
  var outputs: [String: String] = [:]

  /// (Optional) Static display data for this PTransform application. If
  /// there is none, it may be omitted.
  var displayData: [Org_Apache_Beam_Model_Pipeline_V1_DisplayData] = []

  /// Environment where the current PTransform should be executed in.
  ///
  /// Transforms that are required to be implemented by a runner must omit this.
  /// All other transforms are required to specify this.
  var environmentID: String = ""

  /// (Optional) A map from URNs designating a type of annotation, to the
  /// annotation in binary format. For example, an annotation could indicate
  /// that this PTransform has specific privacy properties.
  ///
  /// A runner MAY ignore types of annotations it doesn't understand. Therefore
  /// annotations MUST NOT be used for metadata that can affect correct
  /// execution of the transform.
  var annotations: [String: Data] = [:]

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `spec`; nil until explicitly set.
  fileprivate var _spec: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
struct Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Primitive transforms may not specify composite sub-transforms.
  enum Primitives: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// ParDo is a Beam transform for generic parallel processing. The ParDo
    /// processing paradigm is similar to the “Map” phase of a
    /// Map/Shuffle/Reduce-style algorithm: a ParDo transform considers each
    /// element in the input PCollection, performs some processing function
    /// (your user code) on that element, and emits zero, one, or multiple
    /// elements to an output PCollection.
    ///
    /// See https://beam.apache.org/documentation/programming-guide/#pardo
    /// for additional details.
    ///
    /// Payload: ParDoPayload
    case parDo // = 0

    /// Flatten is a Beam transform for PCollection objects that store the same
    /// data type. Flatten merges multiple PCollection objects into a single
    /// logical PCollection.
    ///
    /// See https://beam.apache.org/documentation/programming-guide/#flatten
    /// for additional details.
    ///
    /// Payload: None
    case flatten // = 1

    /// GroupByKey is a Beam transform for processing collections of key/value
    /// pairs. It’s a parallel reduction operation, analogous to the Shuffle
    /// phase of a Map/Shuffle/Reduce-style algorithm. The input to GroupByKey is
    /// a collection of key/value pairs that represents a multimap, where the
    /// collection contains multiple pairs that have the same key, but different
    /// values. Given such a collection, you use GroupByKey to collect all of the
    /// values associated with each unique key.
    ///
    /// See https://beam.apache.org/documentation/programming-guide/#groupbykey
    /// for additional details.
    ///
    /// Never defines an environment as the runner is required to implement this
    /// transform.
    ///
    /// Payload: None
    case groupByKey // = 2

    /// A transform which produces a single empty byte array at the minimum
    /// timestamp in the GlobalWindow.
    ///
    /// Never defines an environment as the runner is required to implement this
    /// transform.
    ///
    /// Payload: None
    case impulse // = 3

    /// Windowing subdivides a PCollection according to the timestamps of its
    /// individual elements. Transforms that aggregate multiple elements, such as
    /// GroupByKey and Combine, work implicitly on a per-window basis — they
    /// process each PCollection as a succession of multiple, finite windows,
    /// though the entire collection itself may be of unbounded size.
    ///
    /// See https://beam.apache.org/documentation/programming-guide/#windowing
    /// for additional details.
    ///
    /// Payload: WindowIntoPayload
    case assignWindows // = 4

    /// A testing input that generates an unbounded {@link PCollection} of
    /// elements, advancing the watermark and processing time as elements are
    /// emitted. After all of the specified elements are emitted, ceases to
    /// produce output.
    ///
    /// See https://beam.apache.org/blog/2016/10/20/test-stream.html
    /// for additional details.
    ///
    /// Payload: TestStreamPayload
    case testStream // = 5

    /// Represents mapping of main input window onto side input window.
    ///
    /// Side input window mapping function:
    /// Input: KV<nonce, MainInputWindow>
    /// Output: KV<nonce, SideInputWindow>
    ///
    /// For each main input window, the side input window is returned. The
    /// nonce is used by a runner to associate each input with its output.
    /// The nonce is represented as an opaque set of bytes.
    ///
    /// Payload: SideInput#window_mapping_fn FunctionSpec
    case mapWindows // = 6

    /// Used to merge windows during a GroupByKey.
    ///
    /// Window merging function:
    /// Input: KV<nonce, iterable<OriginalWindow>>
    /// Output: KV<nonce, KV<iterable<UnmergedOriginalWindow>, iterable<KV<MergedWindow, iterable<ConsumedOriginalWindow>>>>
    ///
    /// For each set of original windows, a list of all unmerged windows is
    /// output alongside a map of merged window to set of consumed windows.
    /// All original windows must be contained in either the unmerged original
    /// window set or one of the consumed original window sets. Each original
    /// window can only be part of one output set. The nonce is used by a runner
    /// to associate each input with its output. The nonce is represented as an
    /// opaque set of bytes.
    ///
    /// Payload: WindowingStrategy#window_fn FunctionSpec
    case mergeWindows // = 7

    /// A transform that translates a given element to its human-readable
    /// representation.
    ///
    /// Input: KV<nonce, element>
    /// Output: KV<nonce, string>
    ///
    /// For each given element, the implementation returns the best-effort
    /// human-readable representation. When possible, the implementation could
    /// call a user-overridable method on the type. For example, Java could
    /// call `toString()`, Python could call `str()`, Golang could call
    /// `String()`. The nonce is used by a runner to associate each input with
    /// its output. The nonce is represented as an opaque set of bytes.
    ///
    /// Payload: none
    case toString // = 8

    /// Preserves raw values not known to this generated schema, so round-trips
    /// with newer producers remain lossless.
    case UNRECOGNIZED(Int)

    // Proto3 enum default: the case with raw value 0.
    init() {
      self = .parDo
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .parDo
      case 1: self = .flatten
      case 2: self = .groupByKey
      case 3: self = .impulse
      case 4: self = .assignWindows
      case 5: self = .testStream
      case 6: self = .mapWindows
      case 7: self = .mergeWindows
      case 8: self = .toString
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .parDo: return 0
      case .flatten: return 1
      case .groupByKey: return 2
      case .impulse: return 3
      case .assignWindows: return 4
      case .testStream: return 5
      case .mapWindows: return 6
      case .mergeWindows: return 7
      case .toString: return 8
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  /// Deprecated primitive transforms, retained for wire compatibility.
  enum DeprecatedPrimitives: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Represents the operation to read a Bounded or Unbounded source.
    /// Payload: ReadPayload.
    case read // = 0

    /// Runners should move away from translating `CreatePCollectionView` and treat this as
    /// part of the translation for a `ParDo` side input.
    case createView // = 1

    /// Preserves raw values not known to this generated schema.
    case UNRECOGNIZED(Int)

    // Proto3 enum default: the case with raw value 0.
    init() {
      self = .read
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .read
      case 1: self = .createView
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .read: return 0
      case .createView: return 1
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  enum Composites: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Represents the Combine.perKey() operation.
    /// If this is produced by an SDK, it is assumed that the SDK understands
    /// each of CombineComponents.
    /// Payload: CombinePayload
    case combinePerKey // = 0

    /// Represents the Combine.globally() operation.
    /// If this is produced by an SDK, it is assumed that the SDK understands
    /// each of CombineComponents.
    /// Payload: CombinePayload
    case combineGlobally // = 1

    /// Represents the Reshuffle operation.
    case reshuffle // = 2

    /// Less well-known. Payload: WriteFilesPayload.
    case writeFiles // = 3

    /// Payload: PubSubReadPayload.
    case pubsubRead // = 4

    /// Payload: PubSubWritePayload.
    case pubsubWrite // = 5

    /// Used for pubsub dynamic destinations.
    /// Payload: PubSubWritePayload.
    /// NOTE: declared out of numeric order in the source proto — this case has
    /// raw value 7 while the following one has raw value 6.
    case pubsubWriteV2 // = 7

    /// Represents the GroupIntoBatches.WithShardedKey operation.
    /// Payload: GroupIntoBatchesPayload
    case groupIntoBatchesWithShardedKey // = 6

    /// Preserves raw values not known to this generated schema.
    case UNRECOGNIZED(Int)

    // Proto3 enum default: the case with raw value 0.
    init() {
      self = .combinePerKey
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .combinePerKey
      case 1: self = .combineGlobally
      case 2: self = .reshuffle
      case 3: self = .writeFiles
      case 4: self = .pubsubRead
      case 5: self = .pubsubWrite
      case 6: self = .groupIntoBatchesWithShardedKey
      case 7: self = .pubsubWriteV2
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .combinePerKey: return 0
      case .combineGlobally: return 1
      case .reshuffle: return 2
      case .writeFiles: return 3
      case .pubsubRead: return 4
      case .pubsubWrite: return 5
      case .groupIntoBatchesWithShardedKey: return 6
      case .pubsubWriteV2: return 7
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  /// Payload for all of these: CombinePayload
  enum CombineComponents: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Represents the Pre-Combine part of a lifted Combine Per Key, as described
    /// in the following document:
    /// https://s.apache.org/beam-runner-api-combine-model#heading=h.ta0g6ase8z07
    /// Payload: CombinePayload
    case combinePerKeyPrecombine // = 0

    /// Represents the Merge Accumulators part of a lifted Combine Per Key, as
    /// described in the following document:
    /// https://s.apache.org/beam-runner-api-combine-model#heading=h.jco9rvatld5m
    /// Payload: CombinePayload
    case combinePerKeyMergeAccumulators // = 1

    /// Represents the Extract Outputs part of a lifted Combine Per Key, as
    /// described in the following document:
    /// https://s.apache.org/beam-runner-api-combine-model#heading=h.i9i6p8gtl6ku
    /// Payload: CombinePayload
    case combinePerKeyExtractOutputs // = 2

    /// Represents the Combine Grouped Values transform, as described in the
    /// following document:
    /// https://s.apache.org/beam-runner-api-combine-model#heading=h.aj86ew4v1wk
    /// Payload: CombinePayload
    case combineGroupedValues // = 3

    /// Represents the Convert To Accumulators transform, as described in the
    /// following document:
    /// https://s.apache.org/beam-runner-api-combine-model#heading=h.h5697l1scd9x
    /// Payload: CombinePayload
    case combinePerKeyConvertToAccumulators // = 4

    /// Preserves raw values not known to this generated schema.
    case UNRECOGNIZED(Int)

    // Proto3 enum default: the case with raw value 0.
    init() {
      self = .combinePerKeyPrecombine
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .combinePerKeyPrecombine
      case 1: self = .combinePerKeyMergeAccumulators
      case 2: self = .combinePerKeyExtractOutputs
      case 3: self = .combineGroupedValues
      case 4: self = .combinePerKeyConvertToAccumulators
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .combinePerKeyPrecombine: return 0
      case .combinePerKeyMergeAccumulators: return 1
      case .combinePerKeyExtractOutputs: return 2
      case .combineGroupedValues: return 3
      case .combinePerKeyConvertToAccumulators: return 4
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  /// Payload for all of these: ParDoPayload containing the user's SDF
  enum SplittableParDoComponents: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Pairs the input element with its initial restriction.
    /// Input: element; output: KV(element, restriction).
    case pairWithRestriction // = 0

    /// Splits the restriction of each element/restriction pair and returns the
    /// resulting splits, with a corresponding floating point size estimation
    /// for each.
    ///
    /// A reasonable value for size is the number of bytes expected to be
    /// produced by this (element, restriction) pair.
    ///
    /// Input: KV(element, restriction)
    /// Output: KV(KV(element, restriction), size))
    case splitAndSizeRestrictions // = 1

    /// Applies the DoFn to every element and restriction.
    ///
    /// All primary and residuals returned from checkpointing or splitting must
    /// have the same type as the input to this transform.
    ///
    /// Input: KV(KV(element, restriction), size); output: DoFn's output.
    case processSizedElementsAndRestrictions // = 2

    /// Truncates the restriction of each element/restriction pair and returns
    /// the finite restriction which will be processed when a pipeline is
    /// drained. See
    /// https://docs.google.com/document/d/1NExwHlj-2q2WUGhSO4jTu8XGhDPmm3cllSN8IMmWci8/edit#.
    /// for additional details about drain.
    ///
    /// Input: KV(KV(element, restriction), size);
    /// Output: KV(KV(element, restriction), size).
    case truncateSizedRestriction // = 3

    /// Preserves raw values not known to this generated schema.
    case UNRECOGNIZED(Int)

    // Proto3 enum default: the case with raw value 0.
    init() {
      self = .pairWithRestriction
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .pairWithRestriction
      case 1: self = .splitAndSizeRestrictions
      case 2: self = .processSizedElementsAndRestrictions
      case 3: self = .truncateSizedRestriction
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .pairWithRestriction: return 0
      case .splitAndSizeRestrictions: return 1
      case .processSizedElementsAndRestrictions: return 2
      case .truncateSizedRestriction: return 3
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  /// Payload for all of these: GroupIntoBatchesPayload
  enum GroupIntoBatchesComponents: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case groupIntoBatches // = 0

    /// Preserves raw values not known to this generated schema.
    case UNRECOGNIZED(Int)

    // Proto3 enum default: the case with raw value 0.
    init() {
      self = .groupIntoBatches
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .groupIntoBatches
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .groupIntoBatches: return 0
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

// Hand-written `allCases` throughout this section: the compiler cannot
// synthesize CaseIterable for enums carrying the associated-value
// `UNRECOGNIZED` case.

extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Primitives: CaseIterable {
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Primitives] = [.parDo, .flatten, .groupByKey, .impulse, .assignWindows, .testStream, .mapWindows, .mergeWindows, .toString]
}

extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.DeprecatedPrimitives: CaseIterable {
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.DeprecatedPrimitives] = [.read, .createView]
}

extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Composites: CaseIterable {
  // Order mirrors the declaration order (.pubsubWriteV2 precedes
  // .groupIntoBatchesWithShardedKey), not raw-value order.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Composites] = [.combinePerKey, .combineGlobally, .reshuffle, .writeFiles, .pubsubRead, .pubsubWrite, .pubsubWriteV2, .groupIntoBatchesWithShardedKey]
}

extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.CombineComponents: CaseIterable {
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.CombineComponents] = [.combinePerKeyPrecombine, .combinePerKeyMergeAccumulators, .combinePerKeyExtractOutputs, .combineGroupedValues, .combinePerKeyConvertToAccumulators]
}

extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.SplittableParDoComponents: CaseIterable {
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.SplittableParDoComponents] = [.pairWithRestriction, .splitAndSizeRestrictions, .processSizedElementsAndRestrictions, .truncateSizedRestriction]
}

extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.GroupIntoBatchesComponents: CaseIterable {
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.GroupIntoBatchesComponents] = [.groupIntoBatches]
}

#endif  // swift(>=4.2)
/// Message wrapper for the `Enum` of standard side-input access patterns.
struct Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes {
  // SwiftProtobuf.Message conformance is attached in a separate extension; see
  // the `Message` and `Message+*Additions` files in the SwiftProtobuf library
  // for the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Represents a view over a PCollection<V>.
    ///
    /// StateGetRequests performed on this side input must use
    /// StateKey.IterableSideInput.
    case iterable // = 0

    /// Represents a view over a PCollection<KV<K, V>>.
    ///
    /// StateGetRequests performed on this side input must use
    /// StateKey.MultimapKeysSideInput or StateKey.MultimapSideInput.
    case multimap // = 1

    /// Carries any raw value produced by a newer version of the schema.
    case UNRECOGNIZED(Int)

    init() {
      self = .iterable
    }

    var rawValue: Int {
      switch self {
      case .iterable:
        return 0
      case .multimap:
        return 1
      case .UNRECOGNIZED(let value):
        return value
      }
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .iterable
      case 1:
        self = .multimap
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes.Enum: CaseIterable {
  // Hand-written because the compiler cannot synthesize `allCases` for an enum
  // with the associated-value `UNRECOGNIZED` case.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes.Enum] = [.iterable, .multimap]
}

#endif  // swift(>=4.2)
/// Message wrapper for the `Enum` of standard user-state access patterns.
struct Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes {
  // SwiftProtobuf.Message conformance is attached in a separate extension; see
  // the `Message` and `Message+*Additions` files in the SwiftProtobuf library
  // for the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Represents a user state specification that supports a bag.
    ///
    /// StateRequests performed on this user state must use
    /// StateKey.BagUserState.
    case bag // = 0

    /// Represents a user state specification that supports a multimap.
    ///
    /// StateRequests performed on this user state must use
    /// StateKey.MultimapKeysUserState or StateKey.MultimapUserState.
    case multimap // = 1

    /// Carries any raw value produced by a newer version of the schema.
    case UNRECOGNIZED(Int)

    init() {
      self = .bag
    }

    var rawValue: Int {
      switch self {
      case .bag:
        return 0
      case .multimap:
        return 1
      case .UNRECOGNIZED(let value):
        return value
      }
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .bag
      case 1:
        self = .multimap
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes.Enum: CaseIterable {
  // Hand-written because the compiler cannot synthesize `allCases` for an enum
  // with the associated-value `UNRECOGNIZED` case.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes.Enum] = [.bag, .multimap]
}

#endif  // swift(>=4.2)
/// A PCollection!
struct Org_Apache_Beam_Model_Pipeline_V1_PCollection {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) A unique name for the PCollection.
  ///
  /// Ideally, this should be stable over multiple evolutions of a pipeline
  /// for the purposes of logging and associating pipeline state with a node,
  /// etc.
  ///
  /// If it is not stable, then the runner decides what will happen. But, most
  /// importantly, it must always be here, even if it is autogenerated.
  var uniqueName: String = ""

  /// (Required) The id of the Coder for this PCollection.
  var coderID: String = ""

  /// (Required) Whether this PCollection is bounded or unbounded
  var isBounded: Org_Apache_Beam_Model_Pipeline_V1_IsBounded.Enum = .unspecified

  /// (Required) The id of the windowing strategy for this PCollection.
  var windowingStrategyID: String = ""

  /// (Optional) Static display data for the PCollection. If there is none,
  /// it may be omitted.
  var displayData: [Org_Apache_Beam_Model_Pipeline_V1_DisplayData] = []

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// The payload for the primitive ParDo transform.
struct Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) The FunctionSpec of the DoFn.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var doFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _doFn { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _doFn = newValue }
  }

  /// Returns true if `doFn` has been explicitly set.
  var hasDoFn: Bool { return _doFn != nil }

  /// Clears the value of `doFn`. Subsequent reads from it will return its default value.
  mutating func clearDoFn() { _doFn = nil }

  /// (Optional) A mapping of local input names to side inputs, describing
  /// the expected access pattern.
  var sideInputs: [String: Org_Apache_Beam_Model_Pipeline_V1_SideInput] = [:]

  /// (Optional) A mapping of local state names to state specifications.
  /// If this is set, the stateful processing requirement should also
  /// be placed in the pipeline requirements.
  var stateSpecs: [String: Org_Apache_Beam_Model_Pipeline_V1_StateSpec] = [:]

  /// (Optional) A mapping of local timer family names to timer family
  /// specifications. If this is set, the stateful processing requirement
  /// should also be placed in the pipeline requirements.
  var timerFamilySpecs: [String: Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec] = [:]

  /// (Optional) Only set when this ParDo contains a splittable DoFn.
  /// If this is set, the corresponding standard requirement should also
  /// be placed in the pipeline requirements.
  var restrictionCoderID: String = ""

  /// (Optional) Only set when this ParDo can request bundle finalization.
  /// If this is set, the corresponding standard requirement should also
  /// be placed in the pipeline requirements.
  var requestsFinalization: Bool = false

  /// Whether this stage requires time sorted input.
  /// If this is set, the corresponding standard requirement should also
  /// be placed in the pipeline requirements.
  var requiresTimeSortedInput: Bool = false

  /// Whether this stage requires stable input.
  /// If this is set, the corresponding standard requirement should also
  /// be placed in the pipeline requirements.
  var requiresStableInput: Bool = false

  /// If populated, the name of the timer family spec which should be notified
  /// on each window expiry.
  /// If this is set, the corresponding standard requirement should also
  /// be placed in the pipeline requirements.
  var onWindowExpirationTimerFamilySpec: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `doFn`; nil means the field was never set.
  fileprivate var _doFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
struct Org_Apache_Beam_Model_Pipeline_V1_StateSpec {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// TODO(BEAM-13930): Deprecate and remove these state specs
/// Storage for the oneof below; when non-nil, exactly one spec kind is carried.
var spec: Org_Apache_Beam_Model_Pipeline_V1_StateSpec.OneOf_Spec? = nil
/// Convenience view of the `.readModifyWriteSpec` case of `spec`: reading while a
/// different case (or none) is set returns a default-initialized message; writing
/// replaces whichever case was previously set.
var readModifyWriteSpec: Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec {
get {
if case .readModifyWriteSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec()
}
set {spec = .readModifyWriteSpec(newValue)}
}
/// Convenience view of the `.bagSpec` case of `spec`; same get/set semantics as
/// `readModifyWriteSpec`.
var bagSpec: Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec {
get {
if case .bagSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec()
}
set {spec = .bagSpec(newValue)}
}
/// Convenience view of the `.combiningSpec` case of `spec`.
var combiningSpec: Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec {
get {
if case .combiningSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec()
}
set {spec = .combiningSpec(newValue)}
}
/// Convenience view of the `.mapSpec` case of `spec`.
var mapSpec: Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec {
get {
if case .mapSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec()
}
set {spec = .mapSpec(newValue)}
}
/// Convenience view of the `.setSpec` case of `spec`.
var setSpec: Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec {
get {
if case .setSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec()
}
set {spec = .setSpec(newValue)}
}
/// Convenience view of the `.orderedListSpec` case of `spec`.
var orderedListSpec: Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec {
get {
if case .orderedListSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec()
}
set {spec = .orderedListSpec(newValue)}
}
/// Convenience view of the `.multimapSpec` case of `spec`.
var multimapSpec: Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec {
get {
if case .multimapSpec(let v)? = spec {return v}
return Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec()
}
set {spec = .multimapSpec(newValue)}
}
/// (Required) URN of the protocol required by this state specification to present
/// the desired SDK-specific interface to a UDF.
///
/// This protocol defines the SDK harness <-> Runner Harness RPC
/// interface for accessing and mutating user state.
///
/// See StandardUserStateTypes for an enumeration of all user state types
/// defined.
/// Reading an unset field yields a default-initialized FunctionSpec.
var `protocol`: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
get {return _protocol ?? Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()}
set {_protocol = newValue}
}
/// Returns true if ``protocol`` has been explicitly set.
var hasProtocol: Bool {return self._protocol != nil}
/// Clears the value of ``protocol``. Subsequent reads from it will return its default value.
mutating func clearProtocol() {self._protocol = nil}
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
/// TODO(BEAM-13930): Deprecate and remove these state specs
/// The oneof carried by `spec`; each case wraps one state-spec message kind.
enum OneOf_Spec: Equatable {
case readModifyWriteSpec(Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec)
case bagSpec(Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec)
case combiningSpec(Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec)
case mapSpec(Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec)
case setSpec(Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec)
case orderedListSpec(Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec)
case multimapSpec(Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec)
// Swift < 4.1 cannot synthesize == for enums with associated values, so it
// is written out by hand below; newer compilers synthesize it instead.
#if !swift(>=4.1)
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StateSpec.OneOf_Spec, rhs: Org_Apache_Beam_Model_Pipeline_V1_StateSpec.OneOf_Spec) -> Bool {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch (lhs, rhs) {
case (.readModifyWriteSpec, .readModifyWriteSpec): return {
guard case .readModifyWriteSpec(let l) = lhs, case .readModifyWriteSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.bagSpec, .bagSpec): return {
guard case .bagSpec(let l) = lhs, case .bagSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.combiningSpec, .combiningSpec): return {
guard case .combiningSpec(let l) = lhs, case .combiningSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.mapSpec, .mapSpec): return {
guard case .mapSpec(let l) = lhs, case .mapSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.setSpec, .setSpec): return {
guard case .setSpec(let l) = lhs, case .setSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.orderedListSpec, .orderedListSpec): return {
guard case .orderedListSpec(let l) = lhs, case .orderedListSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.multimapSpec, .multimapSpec): return {
guard case .multimapSpec(let l) = lhs, case .multimapSpec(let r) = rhs else { preconditionFailure() }
return l == r
}()
// Mismatched cases are unequal.
default: return false
}
}
#endif
}
init() {}
// Backing storage for `protocol`; nil means the field was never set.
fileprivate var _protocol: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
/// Spec for a read-modify-write user state cell.
struct Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for the stored value.
  var coderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Spec for a bag (unordered multiset) user state cell.
struct Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for each element of the bag.
  var elementCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Spec for an ordered-list user state cell.
struct Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for each list element.
  var elementCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Spec for a combining user state cell (an accumulator folded by a CombineFn).
struct Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for the accumulator values held in this cell.
  var accumulatorCoderID: String = ""

  /// The FunctionSpec of the CombineFn.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var combineFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _combineFn { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _combineFn = newValue }
  }

  /// Returns true if `combineFn` has been explicitly set.
  var hasCombineFn: Bool { return _combineFn != nil }

  /// Clears the value of `combineFn`. Subsequent reads from it will return its default value.
  mutating func clearCombineFn() { _combineFn = nil }

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `combineFn`; nil means the field was never set.
  fileprivate var _combineFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
/// Spec for a map user state cell.
struct Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for map keys.
  var keyCoderID: String = ""

  /// Id of the coder for map values.
  var valueCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Spec for a multimap user state cell.
struct Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for multimap keys.
  var keyCoderID: String = ""

  /// Id of the coder for multimap values.
  var valueCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Spec for a set user state cell.
struct Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Id of the coder for each set element.
  var elementCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Spec for a timer family: the time domain timers fire in, plus the coder
/// used for the family's timers.
struct Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// The time domain for this timer family; proto3 default is unspecified.
  var timeDomain: Org_Apache_Beam_Model_Pipeline_V1_TimeDomain.Enum = .unspecified

  /// Id of the coder for timers in this family.
  var timerFamilyCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Namespace message wrapping the IsBounded enum.
struct Org_Apache_Beam_Model_Pipeline_V1_IsBounded {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Whether a collection is bounded (2), unbounded (1), or unspecified (0).
  /// Values outside that range survive round-trips via `UNRECOGNIZED`.
  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified // = 0
    case unbounded // = 1
    case bounded // = 2
    case UNRECOGNIZED(Int)

    /// Proto3 default: the zero-valued case.
    init() {
      self = .unspecified
    }

    /// Never actually returns nil: unknown raw values map to `.UNRECOGNIZED`.
    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .unspecified
      case 1: self = .unbounded
      case 2: self = .bounded
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    /// The proto wire number for this case.
    var rawValue: Int {
      switch self {
      case .unspecified: return 0
      case .unbounded: return 1
      case .bounded: return 2
      case .UNRECOGNIZED(let wireValue): return wireValue
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
extension Org_Apache_Beam_Model_Pipeline_V1_IsBounded.Enum: CaseIterable {
  // Synthesis is unavailable because of the UNRECOGNIZED(Int) case, so the
  // known cases are listed by hand; UNRECOGNIZED is deliberately excluded.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_IsBounded.Enum] = [.unspecified, .unbounded, .bounded]
}
#endif // swift(>=4.2)
/// The payload for the primitive Read transform.
struct Org_Apache_Beam_Model_Pipeline_V1_ReadPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) The FunctionSpec of the source for this Read.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var source: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _source { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _source = newValue }
  }

  /// Returns true if `source` has been explicitly set.
  var hasSource: Bool { return _source != nil }

  /// Clears the value of `source`. Subsequent reads from it will return its default value.
  mutating func clearSource() { _source = nil }

  /// (Required) Whether the source is bounded or unbounded.
  var isBounded: Org_Apache_Beam_Model_Pipeline_V1_IsBounded.Enum = .unspecified

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `source`; nil means the field was never set.
  fileprivate var _source: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
/// The payload for the WindowInto transform.
struct Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) The FunctionSpec of the WindowFn.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var windowFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _windowFn { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _windowFn = newValue }
  }

  /// Returns true if `windowFn` has been explicitly set.
  var hasWindowFn: Bool { return _windowFn != nil }

  /// Clears the value of `windowFn`. Subsequent reads from it will return its default value.
  mutating func clearWindowFn() { _windowFn = nil }

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `windowFn`; nil means the field was never set.
  fileprivate var _windowFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
/// The payload for the special-but-not-primitive Combine transform.
struct Org_Apache_Beam_Model_Pipeline_V1_CombinePayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) The FunctionSpec of the CombineFn.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var combineFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _combineFn { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _combineFn = newValue }
  }

  /// Returns true if `combineFn` has been explicitly set.
  var hasCombineFn: Bool { return _combineFn != nil }

  /// Clears the value of `combineFn`. Subsequent reads from it will return its default value.
  mutating func clearCombineFn() { _combineFn = nil }

  /// (Required) A reference to the Coder to use for accumulators of the CombineFn.
  var accumulatorCoderID: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `combineFn`; nil means the field was never set.
  fileprivate var _combineFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
/// The payload for the test-only primitive TestStream
struct Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Required) the coder for elements in the TestStream events
var coderID: String = String()
/// (Optional) If specified, the TestStream will replay these events.
var events: [Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event] = []
/// (Optional) If specified, points to a TestStreamService to be
/// used to retrieve events.
/// Reading an unset field yields a default-initialized descriptor.
var endpoint: Org_Apache_Beam_Model_Pipeline_V1_ApiServiceDescriptor {
get {return _endpoint ?? Org_Apache_Beam_Model_Pipeline_V1_ApiServiceDescriptor()}
set {_endpoint = newValue}
}
/// Returns true if `endpoint` has been explicitly set.
var hasEndpoint: Bool {return self._endpoint != nil}
/// Clears the value of `endpoint`. Subsequent reads from it will return its default value.
mutating func clearEndpoint() {self._endpoint = nil}
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
/// A single TestStream event: advance the watermark, advance processing
/// time, or add elements (a oneof; at most one kind is set).
struct Event {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// Storage for the oneof; when non-nil, exactly one event kind is carried.
var event: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.OneOf_Event? = nil
/// Convenience view of the `.watermarkEvent` case of `event`: reading while a
/// different case (or none) is set returns a default-initialized message;
/// writing replaces whichever case was previously set.
var watermarkEvent: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark {
get {
if case .watermarkEvent(let v)? = event {return v}
return Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark()
}
set {event = .watermarkEvent(newValue)}
}
/// Convenience view of the `.processingTimeEvent` case of `event`; same
/// get/set semantics as `watermarkEvent`.
var processingTimeEvent: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime {
get {
if case .processingTimeEvent(let v)? = event {return v}
return Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime()
}
set {event = .processingTimeEvent(newValue)}
}
/// Convenience view of the `.elementEvent` case of `event`.
var elementEvent: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements {
get {
if case .elementEvent(let v)? = event {return v}
return Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements()
}
set {event = .elementEvent(newValue)}
}
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
/// The oneof carried by `event`; each case wraps one event message kind.
enum OneOf_Event: Equatable {
case watermarkEvent(Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark)
case processingTimeEvent(Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime)
case elementEvent(Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements)
// Swift < 4.1 cannot synthesize == for enums with associated values, so it
// is written out by hand below; newer compilers synthesize it instead.
#if !swift(>=4.1)
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.OneOf_Event, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.OneOf_Event) -> Bool {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch (lhs, rhs) {
case (.watermarkEvent, .watermarkEvent): return {
guard case .watermarkEvent(let l) = lhs, case .watermarkEvent(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.processingTimeEvent, .processingTimeEvent): return {
guard case .processingTimeEvent(let l) = lhs, case .processingTimeEvent(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.elementEvent, .elementEvent): return {
guard case .elementEvent(let l) = lhs, case .elementEvent(let r) = rhs else { preconditionFailure() }
return l == r
}()
// Mismatched cases are unequal.
default: return false
}
}
#endif
}
/// Advances the watermark to the specified timestamp.
struct AdvanceWatermark {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Required) The watermark in millisecond to advance to.
var newWatermark: Int64 = 0
/// (Optional) The output watermark tag for a PCollection. If unspecified
/// or with an empty string, this will default to the Main PCollection
/// Output
var tag: String = String()
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Advances the processing time clock by the specified amount.
struct AdvanceProcessingTime {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Required) The duration in millisecond to advance by.
var advanceDuration: Int64 = 0
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Adds elements to the stream to be emitted.
struct AddElements {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Required) The elements to add to the TestStream.
var elements: [Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.TimestampedElement] = []
/// (Optional) The output PCollection tag to add these elements to. If
/// unspecified or with an empty string, this will default to the Main
/// PCollection Output.
var tag: String = String()
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
init() {}
}
/// A single element inside of the TestStream.
struct TimestampedElement {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Required) The element encoded. Currently the TestStream only supports
/// encoding primitives.
var encodedElement: Data = Data()
/// (Required) The event timestamp in millisecond of this element.
var timestamp: Int64 = 0
/// Wire fields not known to this generated schema, preserved for round-tripping.
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
init() {}
// Backing storage for `endpoint`; nil means the field was never set.
fileprivate var _endpoint: Org_Apache_Beam_Model_Pipeline_V1_ApiServiceDescriptor? = nil
}
/// Request for a subset of a TestStream's outputs.
struct Org_Apache_Beam_Model_Pipeline_V1_EventsRequest {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// The set of PCollections to read from. These are the PTransform outputs
  /// local names. These are a subset of the TestStream's outputs. This allows
  /// Interactive Beam to cache many PCollections from a pipeline then replay a
  /// subset of them.
  var outputIds: [String] = []

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// The payload for the special-but-not-primitive WriteFiles transform.
struct Org_Apache_Beam_Model_Pipeline_V1_WriteFilesPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) The FunctionSpec of the FileBasedSink.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var sink: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _sink { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _sink = newValue }
  }

  /// Returns true if `sink` has been explicitly set.
  var hasSink: Bool { return _sink != nil }

  /// Clears the value of `sink`. Subsequent reads from it will return its default value.
  mutating func clearSink() { _sink = nil }

  /// (Required) The format function.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var formatFunction: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _formatFunction { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _formatFunction = newValue }
  }

  /// Returns true if `formatFunction` has been explicitly set.
  var hasFormatFunction: Bool { return _formatFunction != nil }

  /// Clears the value of `formatFunction`. Subsequent reads from it will return its default value.
  mutating func clearFormatFunction() { _formatFunction = nil }

  /// Whether windowed writes are requested.
  var windowedWrites: Bool = false

  /// Whether the runner determines the sharding.
  var runnerDeterminedSharding: Bool = false

  /// Mapping of local input names to side inputs.
  var sideInputs: [String: Org_Apache_Beam_Model_Pipeline_V1_SideInput] = [:]

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `sink` and `formatFunction`; nil means never set.
  fileprivate var _sink: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
  fileprivate var _formatFunction: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
/// Payload used by Google Cloud Pub/Sub read transform.
/// This can be used by runners that wish to override Beam Pub/Sub read transform
/// with a native implementation.
/// The SDK should guarantee that only one of topic, subscription,
/// topic_runtime_overridden and subscription_runtime_overridden is set.
/// The output of PubSubReadPayload should be bytes of serialized PubsubMessage
/// proto if with_attributes == true. Otherwise, the bytes is the raw payload.
struct Org_Apache_Beam_Model_Pipeline_V1_PubSubReadPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Topic to read from. Exactly one of topic or subscription should be set.
  /// Topic format is: /topics/project_id/subscription_name
  var topic: String = ""

  /// Subscription to read from. Exactly one of topic or subscription should be set.
  /// Subscription format is: /subscriptions/project_id/subscription_name
  var subscription: String = ""

  /// Attribute that provides element timestamps.
  var timestampAttribute: String = ""

  /// Attribute to be used for uniquely identifying messages.
  var idAttribute: String = ""

  /// If true, reads Pub/Sub payload as well as attributes. If false, reads only the payload.
  var withAttributes: Bool = false

  /// If set, the topic is expected to be provided during runtime.
  var topicRuntimeOverridden: String = ""

  /// If set, the subscription that is expected to be provided during runtime.
  var subscriptionRuntimeOverridden: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload used by Google Cloud Pub/Sub write transform.
/// This can be used by runners that wish to override Beam Pub/Sub write transform
/// with a native implementation.
/// The SDK should guarantee that only one of topic and topic_runtime_overridden
/// is set.
/// The output of PubSubWritePayload should be bytes if serialized PubsubMessage
/// proto.
struct Org_Apache_Beam_Model_Pipeline_V1_PubSubWritePayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Topic to write to.
  /// Topic format is: /topics/project_id/subscription_name
  var topic: String = ""

  /// Attribute that provides element timestamps.
  var timestampAttribute: String = ""

  /// Attribute that uniquely identify messages.
  var idAttribute: String = ""

  /// If set, the topic is expected to be provided during runtime.
  var topicRuntimeOverridden: String = ""

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload for GroupIntoBatches composite transform.
struct Org_Apache_Beam_Model_Pipeline_V1_GroupIntoBatchesPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Max size of a batch.
  var batchSize: Int64 = 0

  /// Max byte size of a batch in element.
  var batchSizeBytes: Int64 = 0

  /// (Optional) Max duration a batch is allowed to be cached in states.
  var maxBufferingDurationMillis: Int64 = 0

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// A coder, the binary format for serialization and deserialization of data in
/// a pipeline.
struct Org_Apache_Beam_Model_Pipeline_V1_Coder {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) A specification for the coder, as a URN plus parameters. This
  /// may be a cross-language agreed-upon format, or it may be a "custom coder"
  /// that can only be used by a particular SDK. It does not include component
  /// coders, as it is beneficial for these to be comprehensible to a runner
  /// regardless of whether the binary format is agreed-upon.
  /// Reading an unset field yields a default-initialized FunctionSpec.
  var spec: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if let populated = _spec { return populated }
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set { _spec = newValue }
  }

  /// Returns true if `spec` has been explicitly set.
  var hasSpec: Bool { return _spec != nil }

  /// Clears the value of `spec`. Subsequent reads from it will return its default value.
  mutating func clearSpec() { _spec = nil }

  /// (Optional) If this coder is parametric, such as ListCoder(VarIntCoder),
  /// this is a list of the components. In order for encodings to be identical,
  /// the FunctionSpec and all components must be identical, recursively.
  var componentCoderIds: [String] = []

  /// Wire fields not known to this generated schema, preserved for round-tripping.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing storage for `spec`; nil means the field was never set.
  fileprivate var _spec: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
struct Org_Apache_Beam_Model_Pipeline_V1_StandardCoders {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var unknownFields = SwiftProtobuf.UnknownStorage()
enum Enum: SwiftProtobuf.Enum {
typealias RawValue = Int
/// Components: None
case bytes // = 0
/// Components: None
case stringUtf8 // = 10
/// Components: The key and value coder, in that order.
case kv // = 1
/// Components: None
case bool // = 12
/// Variable-length encoding of a 64-bit integer.
/// Components: None
case varint // = 2
/// Encodes the floating point value as a big-endian 64-bit integer
/// according to the IEEE 754 double format bit layout.
/// Components: None
case double // = 11
/// Encodes an iterable of elements.
///
/// The encoding for an iterable [e1...eN] of known length N is
///
/// fixed32(N)
/// encode(e1) encode(e2) encode(e3) ... encode(eN)
///
/// If the length is unknown, it is batched up into groups of size b1..bM
/// and encoded as
///
/// fixed32(-1)
/// varInt64(b1) encode(e1) encode(e2) ... encode(e_b1)
/// varInt64(b2) encode(e_(b1+1)) encode(e_(b1+2)) ... encode(e_(b1+b2))
/// ...
/// varInt64(bM) encode(e_(N-bM+1)) encode(e_(N-bM+2)) ... encode(eN)
/// varInt64(0)
///
/// Components: Coder for a single element.
case iterable // = 3
/// Encodes a timer containing a user key, a dynamic timer tag, a clear bit,
/// a fire timestamp, a hold timestamp, the windows and the paneinfo.
/// The encoding is represented as:
/// user key - user defined key, uses the component coder.
/// dynamic timer tag - a string which identifies a timer.
/// windows - uses component coders.
/// clear bit - a boolean set for clearing the timer.
/// fire timestamp - a big endian 8 byte integer representing millis-since-epoch.
/// The encoded representation is shifted so that the byte representation of
/// negative values are lexicographically ordered before the byte representation
/// of positive values. This is typically done by subtracting -9223372036854775808
/// from the value and encoding it as a signed big endian integer. Example values:
///
/// -9223372036854775808: 00 00 00 00 00 00 00 00
/// -255: 7F FF FF FF FF FF FF 01
/// -1: 7F FF FF FF FF FF FF FF
/// 0: 80 00 00 00 00 00 00 00
/// 1: 80 00 00 00 00 00 00 01
/// 256: 80 00 00 00 00 00 01 00
/// 9223372036854775807: FF FF FF FF FF FF FF FF
/// hold timestamp - similar to the fire timestamp.
/// paneinfo - similar to the paneinfo of the windowed_value.
/// Components: Coder for the key and windows.
case timer // = 4
/// Components: None
case intervalWindow // = 5
/// Components: The coder to attach a length prefix to
case lengthPrefix // = 6
/// Components: None
case globalWindow // = 7
/// Encodes an element, the windows it is in, the timestamp of the element,
/// and the pane of the element. The encoding is represented as:
/// timestamp windows pane element
/// timestamp - A big endian 8 byte integer representing millis-since-epoch.
/// The encoded representation is shifted so that the byte representation
/// of negative values are lexicographically ordered before the byte
/// representation of positive values. This is typically done by
/// subtracting -9223372036854775808 from the value and encoding it as a
/// signed big endian integer. Example values:
///
/// -9223372036854775808: 00 00 00 00 00 00 00 00
/// -255: 7F FF FF FF FF FF FF 01
/// -1: 7F FF FF FF FF FF FF FF
/// 0: 80 00 00 00 00 00 00 00
/// 1: 80 00 00 00 00 00 00 01
/// 256: 80 00 00 00 00 00 01 00
/// 9223372036854775807: FF FF FF FF FF FF FF FF
///
/// windows - The windows are encoded using the beam:coder:iterable:v1
/// format, where the windows are encoded using the supplied window
/// coder.
///
/// pane - The first byte of the pane info determines which type of
/// encoding is used, as well as the is_first, is_last, and timing
/// fields. If this byte is bits [0 1 2 3 4 5 6 7], then:
/// * bits [0 1 2 3] determine the encoding as follows:
/// 0000 - The entire pane info is encoded as a single byte.
/// The is_first, is_last, and timing fields are encoded
/// as below, and the index and non-speculative index are
/// both zero (and hence are not encoded here).
/// 0001 - The pane info is encoded as this byte plus a single
/// VarInt encoded integer representing the pane index. The
/// non-speculative index can be derived as follows:
/// -1 if the pane is early, otherwise equal to index.
/// 0010 - The pane info is encoded as this byte plus two VarInt
/// encoded integers representing the pane index and
/// non-speculative index respectively.
/// * bits [4 5] encode the timing as follows:
/// 00 - early
/// 01 - on time
/// 10 - late
/// 11 - unknown
/// * bit 6 is 1 if this is the first pane, 0 otherwise.
/// * bit 7 is 1 if this is the last pane, 0 otherwise.
///
/// element - The element encoded using the supplied element coder.
///
/// Components: The element coder and the window coder, in that order.
case windowedValue // = 8
/// A windowed value coder with parameterized timestamp, windows and pane info.
/// Encodes an element with only the value of the windowed value.
/// Decodes the value and assigns the parameterized timestamp, windows and pane info to the
/// windowed value.
/// Components: The element coder and the window coder, in that order
/// The payload of this coder is an encoded windowed value using the
/// beam:coder:windowed_value:v1 coder parameterized by a beam:coder:bytes:v1
/// element coder and the window coder that this param_windowed_value coder uses.
case paramWindowedValue // = 14
/// Encodes an iterable of elements, some of which may be stored elsewhere.
///
/// The encoding for a state-backed iterable is the same as that for
/// an iterable, but the final varInt64(0) terminating the set of batches
/// may instead be replaced by
///
/// varInt64(-1)
/// varInt64(len(token))
/// token
///
/// where token is an opaque byte string that can be used to fetch the
/// remainder of the iterable (e.g. over the state API).
///
/// Components: Coder for a single element.
case stateBackedIterable // = 9
/// Encodes an arbitrary user defined window and its max timestamp (inclusive).
/// The encoding format is:
/// maxTimestamp window
///
/// maxTimestamp - A big endian 8 byte integer representing millis-since-epoch.
/// The encoded representation is shifted so that the byte representation
/// of negative values are lexicographically ordered before the byte
/// representation of positive values. This is typically done by
/// subtracting -9223372036854775808 from the value and encoding it as a
/// signed big endian integer. Example values:
///
/// -9223372036854775808: 00 00 00 00 00 00 00 00
/// -255: 7F FF FF FF FF FF FF 01
/// -1: 7F FF FF FF FF FF FF FF
/// 0: 80 00 00 00 00 00 00 00
/// 1: 80 00 00 00 00 00 00 01
/// 256: 80 00 00 00 00 00 01 00
/// 9223372036854775807: FF FF FF FF FF FF FF FF
///
/// window - the window is encoded using the supplied window coder.
///
/// Components: Coder for the custom window type.
case customWindow // = 16
/// Encodes a "row", an element with a known schema, defined by an
/// instance of Schema from schema.proto.
///
/// A row is encoded as the concatenation of:
/// - The number of attributes in the schema, encoded with
/// beam:coder:varint:v1. This makes it possible to detect certain
/// allowed schema changes (appending or removing columns) in
/// long-running streaming pipelines.
/// - A byte array representing a packed bitset indicating null fields (a
/// 1 indicating a null) encoded with beam:coder:bytes:v1. The unused
/// bits in the last byte must be set to 0. If there are no nulls an
/// empty byte array is encoded.
/// The two-byte bitset (not including the length-prefix) for the row
/// [NULL, 0, 0, 0, NULL, 0, 0, NULL, 0, NULL] would be
/// [0b10010001, 0b00000010]
/// - An encoding for each non-null field, concatenated together.
///
/// Schema types are mapped to coders as follows:
/// AtomicType:
/// BYTE: not yet a standard coder (https://github.com/apache/beam/issues/19815)
/// INT16: not yet a standard coder (https://github.com/apache/beam/issues/19815)
/// INT32: beam:coder:varint:v1
/// INT64: beam:coder:varint:v1
/// FLOAT: not yet a standard coder (https://github.com/apache/beam/issues/19815)
/// DOUBLE: beam:coder:double:v1
/// STRING: beam:coder:string_utf8:v1
/// BOOLEAN: beam:coder:bool:v1
/// BYTES: beam:coder:bytes:v1
/// ArrayType: beam:coder:iterable:v1 (always has a known length)
/// MapType: not a standard coder, specification defined below.
/// RowType: beam:coder:row:v1
/// LogicalType: Uses the coder for its representation.
///
/// The MapType is encoded by:
/// - An INT32 representing the size of the map (N)
/// - Followed by N interleaved keys and values, encoded with their
/// corresponding coder.
///
/// Nullable types in container types (ArrayType, MapType) per the
/// encoding described for general Nullable types below.
///
/// Logical types understood by all SDKs should be defined in schema.proto.
/// Example of well known logical types:
/// beam:logical_type:schema:v1
/// - Representation type: BYTES
/// - A Beam Schema stored as a serialized proto.
///
/// The payload for RowCoder is an instance of Schema.
/// Components: None
case row // = 13
/// Encodes a user key and a shard id which is an opaque byte string.
///
/// The encoding for a sharded key consists of a shard id byte string and the
/// encoded user key in the following order:
///
/// - shard id using beam:coder:bytes:v1
/// - encoded user key
///
/// Examples:
/// user key with an empty shard id
/// 0x00
/// encode(user_key)
///
/// user key with a shard id taking up two bytes.
/// 0x02
/// 0x11 0x22
/// encode(user_key)
///
/// Components: the user key coder.
case shardedKey // = 15
/// Wraps a coder of a potentially null value
/// A Nullable Type is encoded by:
/// - A one byte null indicator, 0x00 for null values, or 0x01 for present
/// values.
/// - For present values the null indicator is followed by the value
/// encoded with it's corresponding coder.
/// Components: single coder for the value
case nullable // = 17
case UNRECOGNIZED(Int)
init() {
self = .bytes
}
init?(rawValue: Int) {
switch rawValue {
case 0: self = .bytes
case 1: self = .kv
case 2: self = .varint
case 3: self = .iterable
case 4: self = .timer
case 5: self = .intervalWindow
case 6: self = .lengthPrefix
case 7: self = .globalWindow
case 8: self = .windowedValue
case 9: self = .stateBackedIterable
case 10: self = .stringUtf8
case 11: self = .double
case 12: self = .bool
case 13: self = .row
case 14: self = .paramWindowedValue
case 15: self = .shardedKey
case 16: self = .customWindow
case 17: self = .nullable
default: self = .UNRECOGNIZED(rawValue)
}
}
var rawValue: Int {
switch self {
case .bytes: return 0
case .kv: return 1
case .varint: return 2
case .iterable: return 3
case .timer: return 4
case .intervalWindow: return 5
case .lengthPrefix: return 6
case .globalWindow: return 7
case .windowedValue: return 8
case .stateBackedIterable: return 9
case .stringUtf8: return 10
case .double: return 11
case .bool: return 12
case .row: return 13
case .paramWindowedValue: return 14
case .shardedKey: return 15
case .customWindow: return 16
case .nullable: return 17
case .UNRECOGNIZED(let i): return i
}
}
}
init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_StandardCoders.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  // The element order is preserved exactly as generated.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardCoders.Enum] = [
    .bytes,
    .stringUtf8,
    .kv,
    .bool,
    .varint,
    .double,
    .iterable,
    .timer,
    .intervalWindow,
    .lengthPrefix,
    .globalWindow,
    .windowedValue,
    .paramWindowedValue,
    .stateBackedIterable,
    .customWindow,
    .row,
    .shardedKey,
    .nullable,
  ]
}

#endif  // swift(>=4.2)
/// A windowing strategy describes the window function, triggering, allowed
/// lateness, and accumulation mode for a PCollection.
///
/// TODO: consider inlining field on PCollection
struct Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.

  /// (Required) The FunctionSpec of the UDF that assigns windows,
  /// merges windows, and shifts timestamps before they are
  /// combined according to the OutputTime.
  var windowFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      // Fall back to a default-constructed message when the field is unset.
      return _windowFn ?? Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set {
      _windowFn = newValue
    }
  }
  /// Returns true if `windowFn` has been explicitly set.
  var hasWindowFn: Bool {
    return _windowFn != nil
  }
  /// Clears the value of `windowFn`. Subsequent reads from it will return its default value.
  mutating func clearWindowFn() {
    _windowFn = nil
  }

  /// (Required) Whether or not the window fn is merging.
  ///
  /// This knowledge is required for many optimizations.
  var mergeStatus: Org_Apache_Beam_Model_Pipeline_V1_MergeStatus.Enum = .unspecified

  /// (Required) The coder for the windows of this PCollection.
  var windowCoderID: String = ""

  /// (Required) The trigger to use when grouping this PCollection.
  var trigger: Org_Apache_Beam_Model_Pipeline_V1_Trigger {
    get {
      // Fall back to a default-constructed message when the field is unset.
      return _trigger ?? Org_Apache_Beam_Model_Pipeline_V1_Trigger()
    }
    set {
      _trigger = newValue
    }
  }
  /// Returns true if `trigger` has been explicitly set.
  var hasTrigger: Bool {
    return _trigger != nil
  }
  /// Clears the value of `trigger`. Subsequent reads from it will return its default value.
  mutating func clearTrigger() {
    _trigger = nil
  }

  /// (Required) The accumulation mode indicates whether new panes are a full
  /// replacement for prior panes or whether they are deltas to be combined
  /// with other panes (the combine should correspond to whatever the upstream
  /// grouping transform is).
  var accumulationMode: Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode.Enum = .unspecified

  /// (Required) The OutputTime specifies, for a grouping transform, how to
  /// compute the aggregate timestamp. The window_fn will first possibly shift
  /// it later, then the OutputTime takes the max, min, or ignores it and takes
  /// the end of window.
  ///
  /// This is actually only for input to grouping transforms, but since they
  /// may be introduced in runner-specific ways, it is carried along with the
  /// windowing strategy.
  var outputTime: Org_Apache_Beam_Model_Pipeline_V1_OutputTime.Enum = .unspecified

  /// (Required) Indicate when output should be omitted upon window expiration.
  var closingBehavior: Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior.Enum = .unspecified

  /// (Required) The duration, in milliseconds, beyond the end of a window at
  /// which the window becomes droppable.
  var allowedLateness: Int64 = 0

  /// (Required) Indicate whether empty on-time panes should be omitted.
  var onTimeBehavior: Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior.Enum = .unspecified

  /// (Required) Whether or not the window fn assigns inputs to exactly one window
  ///
  /// This knowledge is required for some optimizations
  var assignsToOneWindow: Bool = false

  /// (Optional) Environment where the current window_fn should be applied in.
  /// Runner that executes the pipeline may choose to override this if needed.
  /// If not specified, environment will be decided by the runner.
  var environmentID: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Optional-message backing storage; `nil` means "not explicitly set".
  // These names are referenced by the Message-conformance extension elsewhere
  // in this file, so they must not be renamed.
  fileprivate var _windowFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
  fileprivate var _trigger: Org_Apache_Beam_Model_Pipeline_V1_Trigger? = nil
}
/// Whether or not a PCollection's WindowFn is non-merging, merging, or
/// merging-but-already-merged, in which case a subsequent GroupByKey is almost
/// always going to do something the user does not want
struct Org_Apache_Beam_Model_Pipeline_V1_MergeStatus {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified  // = 0

    /// The WindowFn does not require merging.
    /// Examples: global window, FixedWindows, SlidingWindows
    case nonMerging  // = 1

    /// The WindowFn is merging and merging has not yet been performed on
    /// the PCollection.
    /// Example: Sessions prior to a GroupByKey
    case needsMerge  // = 2

    /// The WindowFn is merging and merging has already occurred on the
    /// PCollection.
    /// Example: Sessions after a GroupByKey
    case alreadyMerged  // = 3

    /// Carries any wire value not listed above, so round-tripping is lossless.
    case UNRECOGNIZED(Int)

    init() {
      self = .unspecified
    }

    // Never actually fails: unknown values are preserved as UNRECOGNIZED.
    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .unspecified
      case 1:
        self = .nonMerging
      case 2:
        self = .needsMerge
      case 3:
        self = .alreadyMerged
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .unspecified:
        return 0
      case .nonMerging:
        return 1
      case .needsMerge:
        return 2
      case .alreadyMerged:
        return 3
      case .UNRECOGNIZED(let value):
        return value
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_MergeStatus.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_MergeStatus.Enum] = [
    .unspecified,
    .nonMerging,
    .needsMerge,
    .alreadyMerged,
  ]
}

#endif  // swift(>=4.2)
/// Whether or not subsequent outputs of aggregations should be entire
/// replacement values or just the aggregation of inputs received since
/// the prior output.
struct Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified  // = 0

    /// The aggregation is discarded when it is output
    case discarding  // = 1

    /// The aggregation is accumulated across outputs
    case accumulating  // = 2

    /// The aggregation emits retractions when it is output
    case retracting  // = 3

    /// Carries any wire value not listed above, so round-tripping is lossless.
    case UNRECOGNIZED(Int)

    init() {
      self = .unspecified
    }

    // Never actually fails: unknown values are preserved as UNRECOGNIZED.
    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .unspecified
      case 1:
        self = .discarding
      case 2:
        self = .accumulating
      case 3:
        self = .retracting
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .unspecified:
        return 0
      case .discarding:
        return 1
      case .accumulating:
        return 2
      case .retracting:
        return 3
      case .UNRECOGNIZED(let value):
        return value
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode.Enum] = [
    .unspecified,
    .discarding,
    .accumulating,
    .retracting,
  ]
}

#endif  // swift(>=4.2)
/// Controls whether or not an aggregating transform should output data
/// when a window expires.
struct Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified  // = 0

    /// Emit output when a window expires, whether or not there has been
    /// any new data since the last output.
    case emitAlways  // = 1

    /// Only emit output when new data has arrived since the last output.
    case emitIfNonempty  // = 2

    /// Carries any wire value not listed above, so round-tripping is lossless.
    case UNRECOGNIZED(Int)

    init() {
      self = .unspecified
    }

    // Never actually fails: unknown values are preserved as UNRECOGNIZED.
    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .unspecified
      case 1:
        self = .emitAlways
      case 2:
        self = .emitIfNonempty
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .unspecified:
        return 0
      case .emitAlways:
        return 1
      case .emitIfNonempty:
        return 2
      case .UNRECOGNIZED(let value):
        return value
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior.Enum] = [
    .unspecified,
    .emitAlways,
    .emitIfNonempty,
  ]
}

#endif  // swift(>=4.2)
/// Controls whether or not an aggregating transform should output data
/// when an on-time pane is empty.
struct Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified  // = 0

    /// Always fire the on-time pane. Even if there is no new data since
    /// the previous firing, an element will be produced.
    case fireAlways  // = 1

    /// Only fire the on-time pane if there is new data since the previous firing.
    case fireIfNonempty  // = 2

    /// Carries any wire value not listed above, so round-tripping is lossless.
    case UNRECOGNIZED(Int)

    init() {
      self = .unspecified
    }

    // Never actually fails: unknown values are preserved as UNRECOGNIZED.
    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .unspecified
      case 1:
        self = .fireAlways
      case 2:
        self = .fireIfNonempty
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .unspecified:
        return 0
      case .fireAlways:
        return 1
      case .fireIfNonempty:
        return 2
      case .UNRECOGNIZED(let value):
        return value
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior.Enum] = [
    .unspecified,
    .fireAlways,
    .fireIfNonempty,
  ]
}

#endif  // swift(>=4.2)
/// When a number of windowed, timestamped inputs are aggregated, the timestamp
/// for the resulting output.
struct Org_Apache_Beam_Model_Pipeline_V1_OutputTime {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified  // = 0

    /// The output has the timestamp of the end of the window.
    case endOfWindow  // = 1

    /// The output has the latest timestamp of the input elements since
    /// the last output.
    case latestInPane  // = 2

    /// The output has the earliest timestamp of the input elements since
    /// the last output.
    case earliestInPane  // = 3

    /// Carries any wire value not listed above, so round-tripping is lossless.
    case UNRECOGNIZED(Int)

    init() {
      self = .unspecified
    }

    // Never actually fails: unknown values are preserved as UNRECOGNIZED.
    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .unspecified
      case 1:
        self = .endOfWindow
      case 2:
        self = .latestInPane
      case 3:
        self = .earliestInPane
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .unspecified:
        return 0
      case .endOfWindow:
        return 1
      case .latestInPane:
        return 2
      case .earliestInPane:
        return 3
      case .UNRECOGNIZED(let value):
        return value
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_OutputTime.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_OutputTime.Enum] = [
    .unspecified,
    .endOfWindow,
    .latestInPane,
    .earliestInPane,
  ]
}

#endif  // swift(>=4.2)
/// The different time domains in the Beam model.
struct Org_Apache_Beam_Model_Pipeline_V1_TimeDomain {
  // SwiftProtobuf.Message conformance is added in a separate extension. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // the methods available on every message.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    case unspecified  // = 0

    /// Event time is time from the perspective of the data
    case eventTime  // = 1

    /// Processing time is time from the perspective of the
    /// execution of your pipeline
    case processingTime  // = 2

    /// Carries any wire value not listed above, so round-tripping is lossless.
    case UNRECOGNIZED(Int)

    init() {
      self = .unspecified
    }

    // Never actually fails: unknown values are preserved as UNRECOGNIZED.
    init?(rawValue: Int) {
      switch rawValue {
      case 0:
        self = .unspecified
      case 1:
        self = .eventTime
      case 2:
        self = .processingTime
      default:
        self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .unspecified:
        return 0
      case .eventTime:
        return 1
      case .processingTime:
        return 2
      case .UNRECOGNIZED(let value):
        return value
      }
    }
  }

  init() {}
}
#if swift(>=4.2)

extension Org_Apache_Beam_Model_Pipeline_V1_TimeDomain.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` rather than `var`: the case list is a fixed constant, and an
  // immutable static avoids shared mutable global state (a data-race hazard under
  // strict concurrency checking). Newer protoc-gen-swift emits `static let` here.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_TimeDomain.Enum] = [
    .unspecified,
    .eventTime,
    .processingTime,
  ]
}

#endif  // swift(>=4.2)
/// A small DSL for expressing when to emit new aggregations
/// from a GroupByKey or CombinePerKey
///
/// A trigger is described in terms of when it is _ready_ to permit output.
struct Org_Apache_Beam_Model_Pipeline_V1_Trigger {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// The full disjoint union of possible triggers.
var trigger: OneOf_Trigger? {
get {return _storage._trigger}
set {_uniqueStorage()._trigger = newValue}
}
var afterAll: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll {
get {
if case .afterAll(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll()
}
set {_uniqueStorage()._trigger = .afterAll(newValue)}
}
var afterAny: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny {
get {
if case .afterAny(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny()
}
set {_uniqueStorage()._trigger = .afterAny(newValue)}
}
var afterEach: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach {
get {
if case .afterEach(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach()
}
set {_uniqueStorage()._trigger = .afterEach(newValue)}
}
var afterEndOfWindow: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow {
get {
if case .afterEndOfWindow(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow()
}
set {_uniqueStorage()._trigger = .afterEndOfWindow(newValue)}
}
var afterProcessingTime: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime {
get {
if case .afterProcessingTime(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime()
}
set {_uniqueStorage()._trigger = .afterProcessingTime(newValue)}
}
var afterSynchronizedProcessingTime: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime {
get {
if case .afterSynchronizedProcessingTime(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime()
}
set {_uniqueStorage()._trigger = .afterSynchronizedProcessingTime(newValue)}
}
var always: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always {
get {
if case .always(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always()
}
set {_uniqueStorage()._trigger = .always(newValue)}
}
var `default`: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default {
get {
if case .default(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default()
}
set {_uniqueStorage()._trigger = .default(newValue)}
}
var elementCount: Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount {
get {
if case .elementCount(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount()
}
set {_uniqueStorage()._trigger = .elementCount(newValue)}
}
var never: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never {
get {
if case .never(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never()
}
set {_uniqueStorage()._trigger = .never(newValue)}
}
var orFinally: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally {
get {
if case .orFinally(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally()
}
set {_uniqueStorage()._trigger = .orFinally(newValue)}
}
var `repeat`: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat {
get {
if case .repeat(let v)? = _storage._trigger {return v}
return Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat()
}
set {_uniqueStorage()._trigger = .repeat(newValue)}
}
var unknownFields = SwiftProtobuf.UnknownStorage()
/// The full disjoint union of possible triggers.
enum OneOf_Trigger: Equatable {
case afterAll(Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll)
case afterAny(Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny)
case afterEach(Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach)
case afterEndOfWindow(Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow)
case afterProcessingTime(Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime)
case afterSynchronizedProcessingTime(Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime)
case always(Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always)
case `default`(Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default)
case elementCount(Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount)
case never(Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never)
case orFinally(Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally)
case `repeat`(Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat)
#if !swift(>=4.1)
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OneOf_Trigger, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OneOf_Trigger) -> Bool {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch (lhs, rhs) {
case (.afterAll, .afterAll): return {
guard case .afterAll(let l) = lhs, case .afterAll(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.afterAny, .afterAny): return {
guard case .afterAny(let l) = lhs, case .afterAny(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.afterEach, .afterEach): return {
guard case .afterEach(let l) = lhs, case .afterEach(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.afterEndOfWindow, .afterEndOfWindow): return {
guard case .afterEndOfWindow(let l) = lhs, case .afterEndOfWindow(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.afterProcessingTime, .afterProcessingTime): return {
guard case .afterProcessingTime(let l) = lhs, case .afterProcessingTime(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.afterSynchronizedProcessingTime, .afterSynchronizedProcessingTime): return {
guard case .afterSynchronizedProcessingTime(let l) = lhs, case .afterSynchronizedProcessingTime(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.always, .always): return {
guard case .always(let l) = lhs, case .always(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.default, .default): return {
guard case .default(let l) = lhs, case .default(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.elementCount, .elementCount): return {
guard case .elementCount(let l) = lhs, case .elementCount(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.never, .never): return {
guard case .never(let l) = lhs, case .never(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.orFinally, .orFinally): return {
guard case .orFinally(let l) = lhs, case .orFinally(let r) = rhs else { preconditionFailure() }
return l == r
}()
case (.repeat, .repeat): return {
guard case .repeat(let l) = lhs, case .repeat(let r) = rhs else { preconditionFailure() }
return l == r
}()
default: return false
}
}
#endif
}
/// Ready when all subtriggers are ready.
struct AfterAll {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var subtriggers: [Org_Apache_Beam_Model_Pipeline_V1_Trigger] = []
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Ready when any subtrigger is ready.
struct AfterAny {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var subtriggers: [Org_Apache_Beam_Model_Pipeline_V1_Trigger] = []
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Starting with the first subtrigger, ready when the _current_ subtrigger
/// is ready. After output, advances the current trigger by one.
struct AfterEach {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var subtriggers: [Org_Apache_Beam_Model_Pipeline_V1_Trigger] = []
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Ready after the input watermark is past the end of the window.
///
/// May have implicitly-repeated subtriggers for early and late firings.
/// When the end of the window is reached, the trigger transitions between
/// the subtriggers.
struct AfterEndOfWindow {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Optional) A trigger governing output prior to the end of the window.
var earlyFirings: Org_Apache_Beam_Model_Pipeline_V1_Trigger {
get {return _storage._earlyFirings ?? Org_Apache_Beam_Model_Pipeline_V1_Trigger()}
set {_uniqueStorage()._earlyFirings = newValue}
}
/// Returns true if `earlyFirings` has been explicitly set.
var hasEarlyFirings: Bool {return _storage._earlyFirings != nil}
/// Clears the value of `earlyFirings`. Subsequent reads from it will return its default value.
mutating func clearEarlyFirings() {_uniqueStorage()._earlyFirings = nil}
/// (Optional) A trigger governing output after the end of the window.
var lateFirings: Org_Apache_Beam_Model_Pipeline_V1_Trigger {
get {return _storage._lateFirings ?? Org_Apache_Beam_Model_Pipeline_V1_Trigger()}
set {_uniqueStorage()._lateFirings = newValue}
}
/// Returns true if `lateFirings` has been explicitly set.
var hasLateFirings: Bool {return _storage._lateFirings != nil}
/// Clears the value of `lateFirings`. Subsequent reads from it will return its default value.
mutating func clearLateFirings() {_uniqueStorage()._lateFirings = nil}
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
fileprivate var _storage = _StorageClass.defaultInstance
}
/// After input arrives, ready when the specified delay has passed.
struct AfterProcessingTime {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
/// (Required) The transforms to apply to an arriving element's timestamp,
/// in order
var timestampTransforms: [Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform] = []
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// Ready whenever upstream processing time has all caught up with
/// the arrival time of an input element
struct AfterSynchronizedProcessingTime {
// SwiftProtobuf.Message conformance is added in an extension below. See the
// `Message` and `Message+*Additions` files in the SwiftProtobuf library for
// methods supported on all messages.
var unknownFields = SwiftProtobuf.UnknownStorage()
init() {}
}
/// The default trigger. Equivalent to Repeat { AfterEndOfWindow } but
/// specially denoted to indicate the user did not alter the triggering.
struct Default {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  // This message carries no fields; its presence alone selects the trigger.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Ready whenever the requisite number of input elements have arrived
struct ElementCount {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// The number of input elements after which this trigger is ready.
  var elementCount: Int32 = 0

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Never ready. There will only be an ON_TIME output and a final
/// output at window expiration.
struct Never {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  // This message carries no fields; its presence alone selects the trigger.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Always ready. This can also be expressed as ElementCount(1) but
/// is more explicit.
struct Always {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  // This message carries no fields; its presence alone selects the trigger.
  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Ready whenever either of its subtriggers are ready, but finishes output
/// when the finally subtrigger fires.
struct OrFinally {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) Trigger governing main output; may fire repeatedly.
  /// Reads return a default `Trigger` when the field has never been set.
  var main: Org_Apache_Beam_Model_Pipeline_V1_Trigger {
    get {return _storage._main ?? Org_Apache_Beam_Model_Pipeline_V1_Trigger()}
    set {_uniqueStorage()._main = newValue}
  }
  /// Returns true if `main` has been explicitly set.
  var hasMain: Bool {return _storage._main != nil}
  /// Clears the value of `main`. Subsequent reads from it will return its default value.
  mutating func clearMain() {_uniqueStorage()._main = nil}

  /// (Required) Trigger governing termination of output.
  /// Reads return a default `Trigger` when the field has never been set.
  var finally: Org_Apache_Beam_Model_Pipeline_V1_Trigger {
    get {return _storage._finally ?? Org_Apache_Beam_Model_Pipeline_V1_Trigger()}
    set {_uniqueStorage()._finally = newValue}
  }
  /// Returns true if `finally` has been explicitly set.
  var hasFinally: Bool {return _storage._finally != nil}
  /// Clears the value of `finally`. Subsequent reads from it will return its default value.
  mutating func clearFinally() {_uniqueStorage()._finally = nil}

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // NOTE(review): _storage appears to be reference-backed storage for this
  // recursively-nested message; _uniqueStorage() presumably copies it before
  // mutation to preserve value semantics — see _StorageClass elsewhere in file.
  fileprivate var _storage = _StorageClass.defaultInstance
}
/// Ready whenever the subtrigger is ready; resets state when the subtrigger
/// completes.
struct Repeat {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) Trigger that is run repeatedly.
  /// Reads return a default `Trigger` when the field has never been set.
  var subtrigger: Org_Apache_Beam_Model_Pipeline_V1_Trigger {
    get {return _storage._subtrigger ?? Org_Apache_Beam_Model_Pipeline_V1_Trigger()}
    set {_uniqueStorage()._subtrigger = newValue}
  }
  /// Returns true if `subtrigger` has been explicitly set.
  var hasSubtrigger: Bool {return _storage._subtrigger != nil}
  /// Clears the value of `subtrigger`. Subsequent reads from it will return its default value.
  mutating func clearSubtrigger() {_uniqueStorage()._subtrigger = nil}

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // NOTE(review): reference-backed storage for this recursively-nested
  // message; _uniqueStorage() presumably copies before mutation — see
  // _StorageClass elsewhere in file.
  fileprivate var _storage = _StorageClass.defaultInstance
}
init() {}
fileprivate var _storage = _StorageClass.defaultInstance
}
/// A specification for a transformation on a timestamp.
///
/// Primarily used by AfterProcessingTime triggers to transform
/// the arrival time of input to a target time for firing.
struct Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// The `timestamp_transform` oneof; on a well-formed message exactly one of
  /// the `delay` / `alignTo` cases is set.
  var timestampTransform: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.OneOf_TimestampTransform? = nil

  /// Convenience accessor for the `delay` oneof case. Reads return a default
  /// `Delay` when another case (or none) is set; writes select this case.
  var delay: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay {
    get {
      if case .delay(let v)? = timestampTransform {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay()
    }
    set {timestampTransform = .delay(newValue)}
  }

  /// Convenience accessor for the `alignTo` oneof case. Reads return a default
  /// `AlignTo` when another case (or none) is set; writes select this case.
  var alignTo: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo {
    get {
      if case .alignTo(let v)? = timestampTransform {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo()
    }
    set {timestampTransform = .alignTo(newValue)}
  }

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Wrapper enum for the `timestamp_transform` proto oneof.
  enum OneOf_TimestampTransform: Equatable {
    case delay(Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay)
    case alignTo(Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo)

    #if !swift(>=4.1)
    // Hand-written == for pre-4.1 compilers that cannot synthesize Equatable.
    static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.OneOf_TimestampTransform, rhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.OneOf_TimestampTransform) -> Bool {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch (lhs, rhs) {
      case (.delay, .delay): return {
        guard case .delay(let l) = lhs, case .delay(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.alignTo, .alignTo): return {
        guard case .alignTo(let l) = lhs, case .alignTo(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      default: return false
      }
    }
    #endif
  }

  /// A fixed delay applied to a timestamp.
  struct Delay {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) The delay, in milliseconds.
    var delayMillis: Int64 = 0

    var unknownFields = SwiftProtobuf.UnknownStorage()

    init() {}
  }

  /// Quantization of a timestamp to a period/offset grid.
  struct AlignTo {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) A duration to which delays should be quantized
    /// in milliseconds.
    var period: Int64 = 0

    /// (Required) An offset from 0 for the quantization specified by
    /// alignment_size, in milliseconds.
    var offset: Int64 = 0

    var unknownFields = SwiftProtobuf.UnknownStorage()

    init() {}
  }

  init() {}
}
/// A specification for how to "side input" a PCollection.
struct Org_Apache_Beam_Model_Pipeline_V1_SideInput {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) URN of the access pattern required by the `view_fn` to present
  /// the desired SDK-specific interface to a UDF.
  ///
  /// This access pattern defines the SDK harness <-> Runner Harness RPC
  /// interface for accessing a side input.
  ///
  /// See StandardSideInputTypes for an enumeration of all side input types
  /// defined.
  ///
  /// Reads return a default `FunctionSpec` when the field has never been set.
  var accessPattern: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {return _accessPattern ?? Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()}
    set {_accessPattern = newValue}
  }
  /// Returns true if `accessPattern` has been explicitly set.
  var hasAccessPattern: Bool {return self._accessPattern != nil}
  /// Clears the value of `accessPattern`. Subsequent reads from it will return its default value.
  mutating func clearAccessPattern() {self._accessPattern = nil}

  /// (Required) The FunctionSpec of the UDF that adapts a particular
  /// access_pattern to a user-facing view type.
  ///
  /// For example, View.asSingleton() may include a `view_fn` that adapts a
  /// specially-designed multimap to a single value per window.
  var viewFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {return _viewFn ?? Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()}
    set {_viewFn = newValue}
  }
  /// Returns true if `viewFn` has been explicitly set.
  var hasViewFn: Bool {return self._viewFn != nil}
  /// Clears the value of `viewFn`. Subsequent reads from it will return its default value.
  mutating func clearViewFn() {self._viewFn = nil}

  /// (Required) The FunctionSpec of the UDF that maps a main input window
  /// to a side input window.
  ///
  /// For example, when the main input is in fixed windows of one hour, this
  /// can specify that the side input should be accessed according to the day
  /// in which that hour falls.
  var windowMappingFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {return _windowMappingFn ?? Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()}
    set {_windowMappingFn = newValue}
  }
  /// Returns true if `windowMappingFn` has been explicitly set.
  var hasWindowMappingFn: Bool {return self._windowMappingFn != nil}
  /// Clears the value of `windowMappingFn`. Subsequent reads from it will return its default value.
  mutating func clearWindowMappingFn() {self._windowMappingFn = nil}

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing optionals that record whether each message field was explicitly set.
  fileprivate var _accessPattern: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
  fileprivate var _viewFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
  fileprivate var _windowMappingFn: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec? = nil
}
struct Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Well-known artifact *type* URNs. Each case's comment names the payload
  /// message that accompanies it.
  enum Types: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// A URN for locally-accessible artifact files.
    /// payload: ArtifactFilePayload
    case file // = 0

    /// A URN for artifacts described by URLs.
    /// payload: ArtifactUrlPayload
    case url // = 1

    /// A URN for artifacts embedded in ArtifactInformation proto.
    /// payload: EmbeddedFilePayload.
    case embedded // = 2

    /// A URN for Python artifacts hosted on PYPI.
    /// payload: PypiPayload
    case pypi // = 3

    /// A URN for Java artifacts hosted on a Maven repository.
    /// payload: MavenPayload
    case maven // = 4

    /// A URN for deferred artifacts.
    /// payload: DeferredArtifactPayload
    case deferred // = 5

    /// Preserves wire values this generated code does not know about.
    case UNRECOGNIZED(Int)

    init() {
      self = .file
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .file
      case 1: self = .url
      case 2: self = .embedded
      case 3: self = .pypi
      case 4: self = .maven
      case 5: self = .deferred
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .file: return 0
      case .url: return 1
      case .embedded: return 2
      case .pypi: return 3
      case .maven: return 4
      case .deferred: return 5
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  /// Well-known artifact *role* URNs. Each case's comment names the payload
  /// message (if any) that accompanies it.
  enum Roles: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// A URN for staging-to role.
    /// payload: ArtifactStagingToRolePayload
    case stagingTo // = 0

    /// A URN for pip-requirements-file role.
    /// payload: None
    case pipRequirementsFile // = 1

    /// A URN for the Go worker binary role.
    /// This represents the executable for a Go SDK environment.
    /// A Go environment may have one such artifact with this role.
    /// payload: None
    case goWorkerBinary // = 2

    /// Preserves wire values this generated code does not know about.
    case UNRECOGNIZED(Int)

    init() {
      self = .stagingTo
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .stagingTo
      case 1: self = .pipRequirementsFile
      case 2: self = .goWorkerBinary
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .stagingTo: return 0
      case .pipRequirementsFile: return 1
      case .goWorkerBinary: return 2
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
/// Manual CaseIterable conformance for `StandardArtifacts.Types`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Types: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` (not `var`): a `let` still satisfies the get-only
  // `allCases` requirement and prevents mutation of this global case list.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Types] = [
    .file,
    .url,
    .embedded,
    .pypi,
    .maven,
    .deferred,
  ]
}
/// Manual CaseIterable conformance for `StandardArtifacts.Roles`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Roles: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` (not `var`): a `let` still satisfies the get-only
  // `allCases` requirement and prevents mutation of this global case list.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Roles] = [
    .stagingTo,
    .pipRequirementsFile,
    .goWorkerBinary,
  ]
}
#endif // swift(>=4.2)
/// Payload accompanying the `StandardArtifacts.Types.file` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_ArtifactFilePayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Path of the artifact file, e.g. "/tmp/foo.jar".
  var path: String = ""

  /// Hex-encoded sha256 checksum of the artifact.
  var sha256: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload accompanying the `StandardArtifacts.Types.url` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_ArtifactUrlPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Artifact URL, e.g. "https://.../foo.jar" or "gs://tmp/foo.jar".
  var url: String = ""

  /// (Optional) Hex-encoded sha256 checksum of the artifact, if available.
  var sha256: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload accompanying the `StandardArtifacts.Types.embedded` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_EmbeddedFilePayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Raw bytes of the embedded artifact.
  var data: Data = Data()

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload accompanying the `StandardArtifacts.Types.pypi` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_PyPIPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// PyPI-compatible artifact id, e.g. "apache-beam".
  var artifactID: String = ""

  /// PyPI-compatible version string.
  var version: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload accompanying the `StandardArtifacts.Types.maven` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_MavenPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Maven artifact coordinates. The standard format is
  /// "groupId:artifactId:version[:packaging[:classifier]]".
  var artifact: String = ""

  /// (Optional) Repository URL. If not specified, Maven central is used by default.
  var repositoryURL: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload accompanying the `StandardArtifacts.Types.deferred` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_DeferredArtifactPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// A unique string identifier assigned by the creator of this payload. The
  /// creator may use this key to confirm whether they can parse the data.
  var key: String = ""

  /// Data for deferred artifacts. Interpretation of the bytes is delegated to
  /// the creator of this payload.
  var data: Data = Data()

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Payload accompanying the `StandardArtifacts.Roles.stagingTo` URN.
struct Org_Apache_Beam_Model_Pipeline_V1_ArtifactStagingToRolePayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// A generated staged name (relative path under the staging directory).
  var stagedName: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// Describes a single artifact dependency: what it is (type URN plus payload)
/// and what it is for (role URN plus payload).
struct Org_Apache_Beam_Model_Pipeline_V1_ArtifactInformation {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// A URN that describes the type of artifact.
  var typeUrn: String = ""

  /// Bytes interpreted according to `typeUrn` (e.g. an encoded
  /// ArtifactFilePayload for the file type URN).
  var typePayload: Data = Data()

  /// A URN that describes the role of artifact.
  var roleUrn: String = ""

  /// Bytes interpreted according to `roleUrn`.
  var rolePayload: Data = Data()

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// An environment for executing UDFs. By default, an SDK container URL, but
/// can also be a process forked by a command, or an externally managed process.
struct Org_Apache_Beam_Model_Pipeline_V1_Environment {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) The URN of the payload.
  var urn: String = ""

  /// (Optional) The data specifying any parameters to the URN. If
  /// the URN does not require any arguments, this may be omitted.
  var payload: Data = Data()

  /// (Optional) Static display data for the environment. If there is none,
  /// it may be omitted.
  var displayData: [Org_Apache_Beam_Model_Pipeline_V1_DisplayData] = []

  /// (Optional) A set of capabilities this environment supports. This is
  /// typically a list of common URNs designating coders, transforms, etc. that
  /// this environment understands (and a runner MAY use) despite not
  /// appearing in the pipeline proto. This may also be used to indicate
  /// support of optional protocols not tied to a concrete component.
  var capabilities: [String] = []

  /// (Optional) Artifact dependency information used for executing UDFs in
  /// this environment.
  var dependencies: [Org_Apache_Beam_Model_Pipeline_V1_ArtifactInformation] = []

  /// (Optional) A mapping of resource URNs to requested values. The encoding
  /// of the values is specified by the URN. Resource hints are advisory;
  /// a runner is free to ignore resource hints that it does not understand.
  // Idiomatic shorthand `[String: Data]` replaces `Dictionary<String,Data>`;
  // the type is identical.
  var resourceHints: [String: Data] = [:]

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
struct Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Well-known environment kinds for executing user code.
  enum Environments: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// A managed docker container to run user code.
    case docker // = 0

    /// A managed native process to run user code.
    case process // = 1

    /// An external non managed process to run user code.
    case external // = 2

    /// Used as a stub when context is missing a runner-provided default environment.
    // Backticks required because `default` is a Swift keyword.
    case `default` // = 3

    /// Preserves wire values this generated code does not know about.
    case UNRECOGNIZED(Int)

    init() {
      self = .docker
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .docker
      case 1: self = .process
      case 2: self = .external
      case 3: self = .default
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .docker: return 0
      case .process: return 1
      case .external: return 2
      case .default: return 3
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
/// Manual CaseIterable conformance for `StandardEnvironments.Environments`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments.Environments: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` (not `var`): a `let` still satisfies the get-only
  // `allCases` requirement and prevents mutation of this global case list.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments.Environments] = [
    .docker,
    .process,
    .external,
    .default,
  ]
}
#endif // swift(>=4.2)
/// The payload of a Docker image
struct Org_Apache_Beam_Model_Pipeline_V1_DockerPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Docker container image name; implicitly linux_amd64.
  var containerImage: String = ""

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// The payload of a process-based environment: how to launch the worker.
struct Org_Apache_Beam_Model_Pipeline_V1_ProcessPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Operating system, e.g. "linux", "darwin", ..
  var os: String = ""

  /// CPU architecture, e.g. "amd64", ..
  var arch: String = ""

  /// Process to execute.
  var command: String = ""

  /// Environment variables to set for the process.
  // Idiomatic shorthand `[String: String]` replaces `Dictionary<String,String>`;
  // the type is identical.
  var env: [String: String] = [:]

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// The payload of an externally-managed environment.
struct Org_Apache_Beam_Model_Pipeline_V1_ExternalPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// Service descriptor for reaching the external environment.
  /// Reads return a default descriptor when the field has never been set.
  var endpoint: Org_Apache_Beam_Model_Pipeline_V1_ApiServiceDescriptor {
    get {return _endpoint ?? Org_Apache_Beam_Model_Pipeline_V1_ApiServiceDescriptor()}
    set {_endpoint = newValue}
  }
  /// Returns true if `endpoint` has been explicitly set.
  var hasEndpoint: Bool {return self._endpoint != nil}
  /// Clears the value of `endpoint`. Subsequent reads from it will return its default value.
  mutating func clearEndpoint() {self._endpoint = nil}

  /// Arbitrary extra parameters to pass.
  // Idiomatic shorthand `[String: String]` replaces `Dictionary<String,String>`;
  // the type is identical.
  var params: [String: String] = [:]

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}

  // Backing optional recording whether `endpoint` was explicitly set.
  fileprivate var _endpoint: Org_Apache_Beam_Model_Pipeline_V1_ApiServiceDescriptor? = nil
}
/// These URNs are used to indicate capabilities of environments that cannot
/// simply be expressed as a component (such as a Coder or PTransform) that this
/// environment understands.
struct Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Capability URNs an SDK environment may declare.
  /// Note: cases are declared in proto-file order, so SIBLING_WORKERS (= 5)
  /// appears before HARNESS_MONITORING_INFOS (= 4).
  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Indicates support for progress reporting via the legacy Metrics proto.
    case legacyProgressReporting // = 0

    /// Indicates support for progress reporting via the new MonitoringInfo proto.
    case progressReporting // = 1

    /// Indicates support for worker status protocol defined at
    /// https://s.apache.org/beam-fn-api-harness-status.
    case workerStatus // = 2

    /// Indicates this SDK can take advantage of multiple cores when processing
    /// concurrent process bundle requests. (Note that all SDKs must process
    /// an unbounded number of concurrent process bundle requests; this capability
    /// simply indicates this SDK can actually parallelize the work across multiple
    /// cores.)
    case multiCoreBundleProcessing // = 3

    /// Indicates this SDK can cheaply spawn sibling workers (e.g. within the
    /// same container) to work around the fact that it cannot take advantage
    /// of multiple cores (i.e. MULTI_CORE_BUNDLE_PROCESSING is not set).
    case siblingWorkers // = 5

    /// Indicates that this SDK handles the InstructionRequest of type
    /// HarnessMonitoringInfosRequest.
    /// A request to provide full MonitoringInfo data associated with
    /// the entire SDK harness process, not specific to a bundle.
    case harnessMonitoringInfos // = 4

    /// Indicates that this SDK can process elements embedded in the
    /// ProcessBundleRequest. See more about the protocol at
    /// https://s.apache.org/beam-fn-api-control-data-embedding
    case controlRequestElementsEmbedding // = 6

    /// Indicates that this SDK can cache user state and side inputs across
    /// bundle boundaries. This is a hint to runners that runners can rely on the
    /// SDKs ability to store the data in memory reducing the amount of memory
    /// used overall.
    case stateCaching // = 7

    /// Indicates that this SDK can sample in-flight elements. These samples can
    /// then be queried using the SampleDataRequest. Samples are uniquely associated
    /// with a PCollection. Meaning, samples are taken for each PCollection
    /// during bundle processing. This is disabled by default and enabled with the
    /// `enable_data_sampling` experiment.
    case dataSampling // = 8

    /// Preserves wire values this generated code does not know about.
    case UNRECOGNIZED(Int)

    init() {
      self = .legacyProgressReporting
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .legacyProgressReporting
      case 1: self = .progressReporting
      case 2: self = .workerStatus
      case 3: self = .multiCoreBundleProcessing
      case 4: self = .harnessMonitoringInfos
      case 5: self = .siblingWorkers
      case 6: self = .controlRequestElementsEmbedding
      case 7: self = .stateCaching
      case 8: self = .dataSampling
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .legacyProgressReporting: return 0
      case .progressReporting: return 1
      case .workerStatus: return 2
      case .multiCoreBundleProcessing: return 3
      case .harnessMonitoringInfos: return 4
      case .siblingWorkers: return 5
      case .controlRequestElementsEmbedding: return 6
      case .stateCaching: return 7
      case .dataSampling: return 8
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
/// Manual CaseIterable conformance for `StandardProtocols.Enum`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` (not `var`): a `let` still satisfies the get-only
  // `allCases` requirement and prevents mutation of this global case list.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols.Enum] = [
    .legacyProgressReporting,
    .progressReporting,
    .workerStatus,
    .multiCoreBundleProcessing,
    .siblingWorkers,
    .harnessMonitoringInfos,
    .controlRequestElementsEmbedding,
    .stateCaching,
    .dataSampling,
  ]
}
#endif // swift(>=4.2)
/// These URNs are used to indicate capabilities of runner that an environment
/// may take advantage of when interacting with this runner.
struct Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Capability URNs a runner may declare.
  /// Note: raw values are sparse (0 and 6), matching the proto definition.
  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Indicates support for the MonitoringInfo short id protocol.
    case monitoringInfoShortIds // = 0

    /// Indicates that this runner can process elements embedded in the
    /// ProcessBundleResponse. See more about the protocol at
    /// https://s.apache.org/beam-fn-api-control-data-embedding
    case controlResponseElementsEmbedding // = 6

    /// Preserves wire values this generated code does not know about.
    case UNRECOGNIZED(Int)

    init() {
      self = .monitoringInfoShortIds
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .monitoringInfoShortIds
      case 6: self = .controlResponseElementsEmbedding
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .monitoringInfoShortIds: return 0
      case .controlResponseElementsEmbedding: return 6
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
/// Manual CaseIterable conformance for `StandardRunnerProtocols.Enum`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` (not `var`): a `let` still satisfies the get-only
  // `allCases` requirement and prevents mutation of this global case list.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols.Enum] = [
    .monitoringInfoShortIds,
    .controlResponseElementsEmbedding,
  ]
}
#endif // swift(>=4.2)
/// These URNs are used to indicate requirements of a pipeline that cannot
/// simply be expressed as a component (such as a Coder or PTransform) that the
/// runner must understand. In many cases, this indicates a particular field
/// of a transform must be inspected and respected (which allows new fields
/// to be added in a forwards-compatible way).
struct Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Requirement URNs a pipeline may declare; each one names a ParDo payload
  /// field the runner must inspect.
  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// This requirement indicates the state_specs and timer_family_specs fields of ParDo
    /// transform payloads must be inspected.
    case requiresStatefulProcessing // = 0

    /// This requirement indicates the requests_finalization field of ParDo
    /// transform payloads must be inspected.
    case requiresBundleFinalization // = 1

    /// This requirement indicates the requires_stable_input field of ParDo
    /// transform payloads must be inspected.
    case requiresStableInput // = 2

    /// This requirement indicates the requires_time_sorted_input field of ParDo
    /// transform payloads must be inspected.
    case requiresTimeSortedInput // = 3

    /// This requirement indicates the restriction_coder_id field of ParDo
    /// transform payloads must be inspected.
    case requiresSplittableDofn // = 4

    /// This requirement indicates that the on_window_expiration_timer_family_spec field
    /// of ParDo transform payloads must be inspected.
    case requiresOnWindowExpiration // = 5

    /// Preserves wire values this generated code does not know about.
    case UNRECOGNIZED(Int)

    init() {
      self = .requiresStatefulProcessing
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .requiresStatefulProcessing
      case 1: self = .requiresBundleFinalization
      case 2: self = .requiresStableInput
      case 3: self = .requiresTimeSortedInput
      case 4: self = .requiresSplittableDofn
      case 5: self = .requiresOnWindowExpiration
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .requiresStatefulProcessing: return 0
      case .requiresBundleFinalization: return 1
      case .requiresStableInput: return 2
      case .requiresTimeSortedInput: return 3
      case .requiresSplittableDofn: return 4
      case .requiresOnWindowExpiration: return 5
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
/// Manual CaseIterable conformance for `StandardRequirements.Enum`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  // Declared `let` (not `var`): a `let` still satisfies the get-only
  // `allCases` requirement and prevents mutation of this global case list.
  static let allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements.Enum] = [
    .requiresStatefulProcessing,
    .requiresBundleFinalization,
    .requiresStableInput,
    .requiresTimeSortedInput,
    .requiresSplittableDofn,
    .requiresOnWindowExpiration,
  ]
}
#endif // swift(>=4.2)
/// A URN along with a parameter object whose schema is determined by the
/// URN.
///
/// This structure is reused in two distinct, but compatible, ways:
///
/// 1. This can be a specification of the function over PCollections
/// that a PTransform computes.
/// 2. This can be a specification of a user-defined function, possibly
/// SDK-specific. (external to this message must be adequate context
/// to indicate the environment in which the UDF can be understood).
///
/// Though not explicit in this proto, there are two possibilities
/// for the relationship of a runner to this specification that
/// one should bear in mind:
///
/// 1. The runner understands the URN. For example, it might be
/// a well-known URN like "beam:transform:Top" or
/// "beam:window_fn:FixedWindows" with
/// an agreed-upon payload (e.g. a number or duration,
/// respectively).
/// 2. The runner does not understand the URN. It might be an
/// SDK specific URN such as "beam:dofn:javasdk:1.0"
/// that indicates to the SDK what the payload is,
/// such as a serialized Java DoFn from a particular
/// version of the Beam Java SDK. The payload will often
/// then be an opaque message such as bytes in a
/// language-specific serialization format.
struct Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) A URN describing how `payload` should be interpreted. Any
  /// URN not recognized by the inspecting party must be treated as opaque
  /// and passed along as-is.
  var urn: String = ""

  /// (Optional) Parameters for the URN; may be omitted when the URN requires
  /// no arguments.
  var payload: Data = Data()

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// A set of well known URNs describing display data.
///
/// All descriptions must contain how the value should be classified and how it
/// is encoded. Note that some types are logical types which convey contextual
/// information about the pipeline in addition to an encoding while others only
/// specify the encoding itself.
struct Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Well-known display-data URN identifiers. As with all generated proto
  /// enums, unknown wire values are preserved via the UNRECOGNIZED case
  /// rather than being dropped.
  enum DisplayData: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// A string label and value. Has a payload containing an encoded
    /// LabelledPayload.
    case labelled // = 0
    /// Any raw value not known to this generated code; the value is kept
    /// so it can be re-serialized unchanged.
    case UNRECOGNIZED(Int)

    // Proto3 default: the zero-valued case.
    init() {
      self = .labelled
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .labelled
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .labelled: return 0
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
// Manual CaseIterable conformance: listed cases only; the UNRECOGNIZED(Int)
// case is intentionally excluded since it represents arbitrary raw values.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData.DisplayData: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData.DisplayData] = [
    .labelled,
  ]
}
#endif  // swift(>=4.2)
struct Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// (Required) A human readable label for the value.
  var label: String = String()

  /// (Required) A value which will be displayed to the user.
  /// Exactly one of the `OneOf_Value` cases may be set (proto `oneof`).
  var value: Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload.OneOf_Value? = nil

  /// Convenience accessor for the `stringValue` case of `value`. Reading it
  /// while a different case (or no case) is set returns an empty string;
  /// writing it replaces whatever case was previously stored.
  var stringValue: String {
    get {
      if case .stringValue(let v)? = value {return v}
      return String()
    }
    set {value = .stringValue(newValue)}
  }

  /// Convenience accessor for the `boolValue` case of `value`; reads fall
  /// back to `false` when that case is not set.
  var boolValue: Bool {
    get {
      if case .boolValue(let v)? = value {return v}
      return false
    }
    set {value = .boolValue(newValue)}
  }

  /// Convenience accessor for the `doubleValue` case of `value`; reads fall
  /// back to `0` when that case is not set.
  var doubleValue: Double {
    get {
      if case .doubleValue(let v)? = value {return v}
      return 0
    }
    set {value = .doubleValue(newValue)}
  }

  /// Convenience accessor for the `intValue` case of `value`; reads fall
  /// back to `0` when that case is not set.
  var intValue: Int64 {
    get {
      if case .intValue(let v)? = value {return v}
      return 0
    }
    set {value = .intValue(newValue)}
  }

  /// (Required) The key identifies the actual content of the metadata.
  var key: String = String()

  /// (Required) The namespace describes the context that specified the key.
  var namespace: String = String()

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// (Required) A value which will be displayed to the user.
  enum OneOf_Value: Equatable {
    case stringValue(String)
    case boolValue(Bool)
    case doubleValue(Double)
    case intValue(Int64)

    // Manual == only for compilers older than Swift 4.1, which do not
    // synthesize Equatable for enums with associated values.
    #if !swift(>=4.1)
    static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload.OneOf_Value, rhs: Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload.OneOf_Value) -> Bool {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch (lhs, rhs) {
      case (.stringValue, .stringValue): return {
        guard case .stringValue(let l) = lhs, case .stringValue(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.boolValue, .boolValue): return {
        guard case .boolValue(let l) = lhs, case .boolValue(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.doubleValue, .doubleValue): return {
        guard case .doubleValue(let l) = lhs, case .doubleValue(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.intValue, .intValue): return {
        guard case .intValue(let l) = lhs, case .intValue(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      default: return false
      }
    }
    #endif
  }

  init() {}
}
/// Static display data associated with a pipeline component. Display data is
/// useful for pipeline runners, IOs, and diagnostic dashboards to display details
/// about annotated components.
struct Org_Apache_Beam_Model_Pipeline_V1_DisplayData {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  /// A key used to describe the type of display data. See StandardDisplayData
  /// for the set of well known urns describing how the payload is meant to be
  /// interpreted.
  var urn: String = String()

  /// (Optional) The data specifying any parameters to the URN. If
  /// the URN does not require any arguments, this may be omitted.
  var payload: Data = Data()

  var unknownFields = SwiftProtobuf.UnknownStorage()

  init() {}
}
/// A disjoint union of all the things that may contain references
/// that require Components to resolve.
struct Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.
  //
  // Fields are backed by a heap `_StorageClass` (declared outside this chunk);
  // setters call `_uniqueStorage()` before mutating, which presumably
  // implements copy-on-write — confirm against the storage extension.

  /// (Optional) The by-reference components of the root message,
  /// enabling a standalone message.
  ///
  /// If this is absent, it is expected that there are no
  /// references.
  /// Reading when unset returns a default-constructed Components.
  var components: Org_Apache_Beam_Model_Pipeline_V1_Components {
    get {return _storage._components ?? Org_Apache_Beam_Model_Pipeline_V1_Components()}
    set {_uniqueStorage()._components = newValue}
  }
  /// Returns true if `components` has been explicitly set.
  var hasComponents: Bool {return _storage._components != nil}
  /// Clears the value of `components`. Subsequent reads from it will return its default value.
  mutating func clearComponents() {_uniqueStorage()._components = nil}

  /// (Required) The root message that may contain pointers
  /// that should be resolved by looking inside components.
  var root: OneOf_Root? {
    get {return _storage._root}
    set {_uniqueStorage()._root = newValue}
  }

  // Each accessor below exposes one `OneOf_Root` case. Reading while a
  // different case (or no case) is set returns a default-constructed value;
  // writing replaces whichever case was previously stored.

  var coder: Org_Apache_Beam_Model_Pipeline_V1_Coder {
    get {
      if case .coder(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_Coder()
    }
    set {_uniqueStorage()._root = .coder(newValue)}
  }

  var combinePayload: Org_Apache_Beam_Model_Pipeline_V1_CombinePayload {
    get {
      if case .combinePayload(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_CombinePayload()
    }
    set {_uniqueStorage()._root = .combinePayload(newValue)}
  }

  var functionSpec: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec {
    get {
      if case .functionSpec(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec()
    }
    set {_uniqueStorage()._root = .functionSpec(newValue)}
  }

  var parDoPayload: Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload {
    get {
      if case .parDoPayload(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload()
    }
    set {_uniqueStorage()._root = .parDoPayload(newValue)}
  }

  var ptransform: Org_Apache_Beam_Model_Pipeline_V1_PTransform {
    get {
      if case .ptransform(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_PTransform()
    }
    set {_uniqueStorage()._root = .ptransform(newValue)}
  }

  var pcollection: Org_Apache_Beam_Model_Pipeline_V1_PCollection {
    get {
      if case .pcollection(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_PCollection()
    }
    set {_uniqueStorage()._root = .pcollection(newValue)}
  }

  var readPayload: Org_Apache_Beam_Model_Pipeline_V1_ReadPayload {
    get {
      if case .readPayload(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_ReadPayload()
    }
    set {_uniqueStorage()._root = .readPayload(newValue)}
  }

  var sideInput: Org_Apache_Beam_Model_Pipeline_V1_SideInput {
    get {
      if case .sideInput(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_SideInput()
    }
    set {_uniqueStorage()._root = .sideInput(newValue)}
  }

  var windowIntoPayload: Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload {
    get {
      if case .windowIntoPayload(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload()
    }
    set {_uniqueStorage()._root = .windowIntoPayload(newValue)}
  }

  var windowingStrategy: Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy {
    get {
      if case .windowingStrategy(let v)? = _storage._root {return v}
      return Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy()
    }
    set {_uniqueStorage()._root = .windowingStrategy(newValue)}
  }

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// (Required) The root message that may contain pointers
  /// that should be resolved by looking inside components.
  enum OneOf_Root: Equatable {
    case coder(Org_Apache_Beam_Model_Pipeline_V1_Coder)
    case combinePayload(Org_Apache_Beam_Model_Pipeline_V1_CombinePayload)
    case functionSpec(Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec)
    case parDoPayload(Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload)
    case ptransform(Org_Apache_Beam_Model_Pipeline_V1_PTransform)
    case pcollection(Org_Apache_Beam_Model_Pipeline_V1_PCollection)
    case readPayload(Org_Apache_Beam_Model_Pipeline_V1_ReadPayload)
    case sideInput(Org_Apache_Beam_Model_Pipeline_V1_SideInput)
    case windowIntoPayload(Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload)
    case windowingStrategy(Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy)

    // Manual == only for compilers older than Swift 4.1, which do not
    // synthesize Equatable for enums with associated values.
    #if !swift(>=4.1)
    static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents.OneOf_Root, rhs: Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents.OneOf_Root) -> Bool {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch (lhs, rhs) {
      case (.coder, .coder): return {
        guard case .coder(let l) = lhs, case .coder(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.combinePayload, .combinePayload): return {
        guard case .combinePayload(let l) = lhs, case .combinePayload(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.functionSpec, .functionSpec): return {
        guard case .functionSpec(let l) = lhs, case .functionSpec(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.parDoPayload, .parDoPayload): return {
        guard case .parDoPayload(let l) = lhs, case .parDoPayload(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.ptransform, .ptransform): return {
        guard case .ptransform(let l) = lhs, case .ptransform(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.pcollection, .pcollection): return {
        guard case .pcollection(let l) = lhs, case .pcollection(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.readPayload, .readPayload): return {
        guard case .readPayload(let l) = lhs, case .readPayload(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.sideInput, .sideInput): return {
        guard case .sideInput(let l) = lhs, case .sideInput(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.windowIntoPayload, .windowIntoPayload): return {
        guard case .windowIntoPayload(let l) = lhs, case .windowIntoPayload(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      case (.windowingStrategy, .windowingStrategy): return {
        guard case .windowingStrategy(let l) = lhs, case .windowingStrategy(let r) = rhs else { preconditionFailure() }
        return l == r
      }()
      default: return false
      }
    }
    #endif
  }

  init() {}

  // Shared heap storage; `_StorageClass` and `_uniqueStorage()` are declared
  // in extensions outside this chunk.
  fileprivate var _storage = _StorageClass.defaultInstance
}
/// The payload for an executable stage. This will eventually be passed to an SDK in the form of a
/// ProcessBundleDescriptor.
struct Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.
  //
  // Fields are backed by a heap `_StorageClass` (declared outside this chunk);
  // setters call `_uniqueStorage()` before mutating, which presumably
  // implements copy-on-write — confirm against the storage extension.

  /// (Required) Environment in which this stage executes.
  ///
  /// We use an environment rather than environment id
  /// because ExecutableStages use environments directly. This may change in the future.
  /// Reading when unset returns a default-constructed Environment.
  var environment: Org_Apache_Beam_Model_Pipeline_V1_Environment {
    get {return _storage._environment ?? Org_Apache_Beam_Model_Pipeline_V1_Environment()}
    set {_uniqueStorage()._environment = newValue}
  }
  /// Returns true if `environment` has been explicitly set.
  var hasEnvironment: Bool {return _storage._environment != nil}
  /// Clears the value of `environment`. Subsequent reads from it will return its default value.
  mutating func clearEnvironment() {_uniqueStorage()._environment = nil}

  /// The wire coder settings of this executable stage
  var wireCoderSettings: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting] {
    get {return _storage._wireCoderSettings}
    set {_uniqueStorage()._wireCoderSettings = newValue}
  }

  /// (Required) Input PCollection id. This must be present as a value in the inputs of any
  /// PTransform the ExecutableStagePayload is the payload of.
  var input: String {
    get {return _storage._input}
    set {_uniqueStorage()._input = newValue}
  }

  /// The side inputs required for this executable stage. Each side input of each PTransform within
  /// this ExecutableStagePayload must be represented within this field.
  var sideInputs: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.SideInputId] {
    get {return _storage._sideInputs}
    set {_uniqueStorage()._sideInputs = newValue}
  }

  /// PTransform ids contained within this executable stage. This must contain at least one
  /// PTransform id.
  var transforms: [String] {
    get {return _storage._transforms}
    set {_uniqueStorage()._transforms = newValue}
  }

  /// Output PCollection ids. This must be equal to the values of the outputs of any
  /// PTransform the ExecutableStagePayload is the payload of.
  var outputs: [String] {
    get {return _storage._outputs}
    set {_uniqueStorage()._outputs = newValue}
  }

  /// (Required) The components for the Executable Stage. This must contain all of the Transforms
  /// in transforms, and the closure of all of the components they recognize.
  var components: Org_Apache_Beam_Model_Pipeline_V1_Components {
    get {return _storage._components ?? Org_Apache_Beam_Model_Pipeline_V1_Components()}
    set {_uniqueStorage()._components = newValue}
  }
  /// Returns true if `components` has been explicitly set.
  var hasComponents: Bool {return _storage._components != nil}
  /// Clears the value of `components`. Subsequent reads from it will return its default value.
  mutating func clearComponents() {_uniqueStorage()._components = nil}

  /// The user states required for this executable stage. Each user state of each PTransform within
  /// this ExecutableStagePayload must be represented within this field.
  var userStates: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.UserStateId] {
    get {return _storage._userStates}
    set {_uniqueStorage()._userStates = newValue}
  }

  /// The timers required for this executable stage. Each timer of each PTransform within
  /// this ExecutableStagePayload must be represented within this field.
  var timers: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId] {
    get {return _storage._timers}
    set {_uniqueStorage()._timers = newValue}
  }

  /// The timer families required for this executable stage. Each timer family of each PTransform
  /// within this ExecutableStagePayload must be represented within this field.
  var timerFamilies: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerFamilyId] {
    get {return _storage._timerFamilies}
    set {_uniqueStorage()._timerFamilies = newValue}
  }

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// A reference to a side input. Side inputs are uniquely identified by PTransform id and
  /// local name.
  struct SideInputId {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) The id of the PTransform that references this side input.
    var transformID: String = String()

    /// (Required) The local name of this side input from the PTransform that references it.
    var localName: String = String()

    var unknownFields = SwiftProtobuf.UnknownStorage()

    init() {}
  }

  /// A reference to user state. User states are uniquely identified by PTransform id and
  /// local name.
  struct UserStateId {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) The id of the PTransform that references this user state.
    var transformID: String = String()

    /// (Required) The local name of this user state for the PTransform that references it.
    var localName: String = String()

    var unknownFields = SwiftProtobuf.UnknownStorage()

    init() {}
  }

  /// A reference to a timer. Timers are uniquely identified by PTransform id and
  /// local name.
  struct TimerId {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) The id of the PTransform that references this timer.
    var transformID: String = String()

    /// (Required) The local name of this timer for the PTransform that references it.
    var localName: String = String()

    var unknownFields = SwiftProtobuf.UnknownStorage()

    init() {}
  }

  /// A reference to a timer family. Timer families are uniquely identified by
  /// PTransform id and local name.
  struct TimerFamilyId {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) The id of the PTransform that references this timer family.
    var transformID: String = String()

    /// (Required) The local name of this timer family for the PTransform that references it.
    var localName: String = String()

    var unknownFields = SwiftProtobuf.UnknownStorage()

    init() {}
  }

  /// Settings that decide the coder type of wire coder.
  struct WireCoderSetting {
    // SwiftProtobuf.Message conformance is added in an extension below. See the
    // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
    // methods supported on all messages.

    /// (Required) The URN of the wire coder.
    /// Note that only windowed value coder or parameterized windowed value coder are supported.
    var urn: String = String()

    /// (Optional) The data specifying any parameters to the URN. If
    /// the URN is beam:coder:windowed_value:v1, this may be omitted. If the URN is
    /// beam:coder:param_windowed_value:v1, the payload is an encoded windowed
    /// value using the beam:coder:windowed_value:v1 coder parameterized by
    /// a beam:coder:bytes:v1 element coder and the window coder that this
    /// param_windowed_value coder uses.
    var payload: Data = Data()

    /// (Required) The target (PCollection or Timer) this setting applies to.
    /// Exactly one of the `OneOf_Target` cases may be set (proto `oneof`).
    var target: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting.OneOf_Target? = nil

    /// The input or output PCollection id this setting applies to.
    /// Reading while a different case (or no case) is set returns an empty
    /// string; writing replaces the previously stored case.
    var inputOrOutputID: String {
      get {
        if case .inputOrOutputID(let v)? = target {return v}
        return String()
      }
      set {target = .inputOrOutputID(newValue)}
    }

    /// The timer id this setting applies to.
    /// Reading while a different case (or no case) is set returns a
    /// default-constructed TimerId; writing replaces the stored case.
    var timer: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId {
      get {
        if case .timer(let v)? = target {return v}
        return Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId()
      }
      set {target = .timer(newValue)}
    }

    var unknownFields = SwiftProtobuf.UnknownStorage()

    /// (Required) The target (PCollection or Timer) this setting applies to.
    enum OneOf_Target: Equatable {
      /// The input or output PCollection id this setting applies to.
      case inputOrOutputID(String)
      /// The timer id this setting applies to.
      case timer(Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId)

      // Manual == only for compilers older than Swift 4.1, which do not
      // synthesize Equatable for enums with associated values.
      #if !swift(>=4.1)
      static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting.OneOf_Target, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting.OneOf_Target) -> Bool {
        // The use of inline closures is to circumvent an issue where the compiler
        // allocates stack space for every case branch when no optimizations are
        // enabled. https://github.com/apple/swift-protobuf/issues/1034
        switch (lhs, rhs) {
        case (.inputOrOutputID, .inputOrOutputID): return {
          guard case .inputOrOutputID(let l) = lhs, case .inputOrOutputID(let r) = rhs else { preconditionFailure() }
          return l == r
        }()
        case (.timer, .timer): return {
          guard case .timer(let l) = lhs, case .timer(let r) = rhs else { preconditionFailure() }
          return l == r
        }()
        default: return false
        }
      }
      #endif
    }

    init() {}
  }

  init() {}

  // Shared heap storage; `_StorageClass` and `_uniqueStorage()` are declared
  // in extensions outside this chunk.
  fileprivate var _storage = _StorageClass.defaultInstance
}
/// See https://beam.apache.org/documentation/runtime/resource-hints/ for additional documentation
/// on the behavior of StandardResourceHint.
struct Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints {
  // SwiftProtobuf.Message conformance is added in an extension below. See the
  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
  // methods supported on all messages.

  var unknownFields = SwiftProtobuf.UnknownStorage()

  /// Well-known resource-hint URN identifiers. Unknown wire values are
  /// preserved via the UNRECOGNIZED case rather than being dropped.
  enum Enum: SwiftProtobuf.Enum {
    typealias RawValue = Int

    /// Describes hardware accelerators that are desired to have in the execution environment.
    /// Payload: ASCII encoded string with the following format: "type:<type>;count:<n>;<options>" where type
    /// is an accelerator sku, count is the number of accelerators per worker, and options are
    /// related options flags.
    case accelerator // = 0

    /// Describes desired minimal available RAM size in transform's execution environment.
    /// SDKs should convert the size to bytes, but can allow users to specify human-friendly units (e.g. GiB).
    /// Payload: ASCII encoded string of the base 10 representation of an integer number of bytes.
    case minRamBytes // = 1

    /// Any raw value not known to this generated code; the value is kept
    /// so it can be re-serialized unchanged.
    case UNRECOGNIZED(Int)

    // Proto3 default: the zero-valued case.
    init() {
      self = .accelerator
    }

    init?(rawValue: Int) {
      switch rawValue {
      case 0: self = .accelerator
      case 1: self = .minRamBytes
      default: self = .UNRECOGNIZED(rawValue)
      }
    }

    var rawValue: Int {
      switch self {
      case .accelerator: return 0
      case .minRamBytes: return 1
      case .UNRECOGNIZED(let i): return i
      }
    }
  }

  init() {}
}
#if swift(>=4.2)
// Manual CaseIterable conformance: listed cases only; the UNRECOGNIZED(Int)
// case is intentionally excluded since it represents arbitrary raw values.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints.Enum: CaseIterable {
  // The compiler won't synthesize support with the UNRECOGNIZED case.
  static var allCases: [Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints.Enum] = [
    .accelerator,
    .minRamBytes,
  ]
}
#endif  // swift(>=4.2)
#if swift(>=5.5) && canImport(_Concurrency)
// Sendable conformances for every generated message and enum type, gated on
// concurrency support. The conformances are declared `@unchecked` because the
// compiler cannot verify them for these generated declarations; the generator
// emits them as a blanket annotation — do not hand-edit this list.
extension Org_Apache_Beam_Model_Pipeline_V1_BeamConstants: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_BeamConstants.Constants: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Components: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Pipeline: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_PTransform: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Primitives: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.DeprecatedPrimitives: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Composites: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.CombineComponents: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.SplittableParDoComponents: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.GroupIntoBatchesComponents: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_PCollection: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StateSpec.OneOf_Spec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_IsBounded: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_IsBounded.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ReadPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_CombinePayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.OneOf_Event: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.TimestampedElement: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_EventsRequest: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_WriteFilesPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_PubSubReadPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_PubSubWritePayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_GroupIntoBatchesPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Coder: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardCoders: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardCoders.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MergeStatus: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MergeStatus.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_OutputTime: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_OutputTime.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimeDomain: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimeDomain.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.OneOf_Trigger: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.OneOf_TimestampTransform: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_SideInput: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Types: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Roles: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactFilePayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactUrlPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_EmbeddedFilePayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_PyPIPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MavenPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_DeferredArtifactPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactStagingToRolePayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactInformation: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_Environment: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments.Environments: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_DockerPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ProcessPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExternalPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements.Enum: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData.DisplayData: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload.OneOf_Value: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_DisplayData: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents.OneOf_Root: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.SideInputId: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.UserStateId: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerFamilyId: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting.OneOf_Target: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints: @unchecked Sendable {}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints.Enum: @unchecked Sendable {}
#endif  // swift(>=5.5) && canImport(_Concurrency)
// MARK: - Extension support defined in beam_runner_api.proto.
// MARK: - Extension Properties
// Swift Extensions on the extended Messages to add easy access to the declared
// extension fields. The names are based on the extension field name from the proto
// declaration. To avoid naming collisions, the names are prefixed with the name of
// the scope where the extend directive occurs.
// Typed accessors for the two extension fields this .proto file declares on
// google.protobuf.EnumValueOptions: (beam_urn) and (beam_constant).
extension SwiftProtobuf.Google_Protobuf_EnumValueOptions {
/// An extension to be used for specifying the standard URN of various
/// pipeline entities, e.g. transforms, functions, coders etc.
/// Code should refer to the URNs of those entities by extracting
/// it from the (beam_urn) extension, rather than by hard-coding
/// the URN.
///
/// The recommended pattern for declaring it is (exemplified by coders):
///
/// message StandardCoders {
/// enum Enum {
/// BYTES = 0 [(beam_urn) = "beam:coder:bytes:v1"];
/// ...
/// }
/// }
///
/// If there are multiple categories of entities of this type, use the
/// following pattern (exemplified by PTransforms):
///
/// message StandardPTransforms {
/// enum Primitives {
/// ...
/// }
/// enum Composites {
/// ...
/// }
/// }
var Org_Apache_Beam_Model_Pipeline_V1_beamUrn: String {
// Reads fall back to the empty string when the extension is unset.
get {return getExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn) ?? String()}
set {setExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn, value: newValue)}
}
/// Returns true if extension `Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn`
/// has been explicitly set.
var hasOrg_Apache_Beam_Model_Pipeline_V1_beamUrn: Bool {
return hasExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn)
}
/// Clears the value of extension `Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn`.
/// Subsequent reads from it will return its default value.
mutating func clearOrg_Apache_Beam_Model_Pipeline_V1_beamUrn() {
clearExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn)
}
/// A value to store other constants
var Org_Apache_Beam_Model_Pipeline_V1_beamConstant: String {
// Reads fall back to the empty string when the extension is unset.
get {return getExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant) ?? String()}
set {setExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant, value: newValue)}
}
/// Returns true if extension `Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant`
/// has been explicitly set.
var hasOrg_Apache_Beam_Model_Pipeline_V1_beamConstant: Bool {
return hasExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant)
}
/// Clears the value of extension `Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant`.
/// Subsequent reads from it will return its default value.
mutating func clearOrg_Apache_Beam_Model_Pipeline_V1_beamConstant() {
clearExtensionValue(ext: Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant)
}
}
// MARK: - File's ExtensionMap: Org_Apache_Beam_Model_Pipeline_V1_BeamRunnerApi_Extensions
/// A `SwiftProtobuf.SimpleExtensionMap` that includes all of the extensions defined by
/// this .proto file. It can be used any place an `SwiftProtobuf.ExtensionMap` is needed
/// in parsing, or it can be combined with other `SwiftProtobuf.SimpleExtensionMap`s to create
/// a larger `SwiftProtobuf.SimpleExtensionMap`.
// Pass this map to decoding APIs so (beam_urn)/(beam_constant) are parsed as
// extension fields instead of being retained as unknown fields.
let Org_Apache_Beam_Model_Pipeline_V1_BeamRunnerApi_Extensions: SwiftProtobuf.SimpleExtensionMap = [
Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn,
Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant
]
// Extension Objects - The only reason these might be needed is when manually
// constructing a `SimpleExtensionMap`, otherwise, use the above _Extension Properties_
// accessors for the extension fields on the messages directly.
/// An extension to be used for specifying the standard URN of various
/// pipeline entities, e.g. transforms, functions, coders etc.
/// Code should refer to the URNs of those entities by extracting
/// it from the (beam_urn) extension, rather than by hard-coding
/// the URN.
///
/// The recommended pattern for declaring it is (exemplified by coders):
///
/// message StandardCoders {
/// enum Enum {
/// BYTES = 0 [(beam_urn) = "beam:coder:bytes:v1"];
/// ...
/// }
/// }
///
/// If there are multiple categories of entities of this type, use the
/// following pattern (exemplified by PTransforms):
///
/// message StandardPTransforms {
/// enum Primitives {
/// ...
/// }
/// enum Composites {
/// ...
/// }
/// }
// Optional string extension on google.protobuf.EnumValueOptions; the field
// number and full name must match the extend declaration in beam_runner_api.proto.
let Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_urn = SwiftProtobuf.MessageExtension<SwiftProtobuf.OptionalExtensionField<SwiftProtobuf.ProtobufString>, SwiftProtobuf.Google_Protobuf_EnumValueOptions>(
_protobuf_fieldNumber: 185324356,
fieldName: "org.apache.beam.model.pipeline.v1.beam_urn"
)
/// A value to store other constants
// Optional string extension on google.protobuf.EnumValueOptions; the field
// number and full name must match the extend declaration in beam_runner_api.proto.
let Org_Apache_Beam_Model_Pipeline_V1_Extensions_beam_constant = SwiftProtobuf.MessageExtension<SwiftProtobuf.OptionalExtensionField<SwiftProtobuf.ProtobufString>, SwiftProtobuf.Google_Protobuf_EnumValueOptions>(
_protobuf_fieldNumber: 185324357,
fieldName: "org.apache.beam.model.pipeline.v1.beam_constant"
)
// MARK: - Code below here is support for the SwiftProtobuf runtime.
// Proto package prefix shared by every `protoMessageName` declared below.
fileprivate let _protobuf_package = "org.apache.beam.model.pipeline.v1"
extension Org_Apache_Beam_Model_Pipeline_V1_BeamConstants: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".BeamConstants"
  static let _protobuf_nameMap = SwiftProtobuf._NameMap()

  /// `BeamConstants` declares no fields of its own; drain every field number so
  /// the decoder can record the data as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while try decoder.nextFieldNumber() != nil {}
  }

  /// With no declared fields, only `unknownFields` can carry serialized data.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try unknownFields.traverse(visitor: &visitor)
  }

  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_BeamConstants, rhs: Org_Apache_Beam_Model_Pipeline_V1_BeamConstants) -> Bool {
    lhs.unknownFields == rhs.unknownFields
  }
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_BeamConstants.Constants: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "MIN_TIMESTAMP_MILLIS"),
1: .same(proto: "MAX_TIMESTAMP_MILLIS"),
2: .same(proto: "GLOBAL_WINDOW_MAX_TIMESTAMP_MILLIS"),
]
}
// Wire-format support for `Components`: five string-id-keyed registries
// (transforms, pcollections, windowing strategies, coders, environments).
extension Org_Apache_Beam_Model_Pipeline_V1_Components: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".Components"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "transforms"),
2: .same(proto: "pcollections"),
3: .standard(proto: "windowing_strategies"),
4: .same(proto: "coders"),
5: .same(proto: "environments"),
]
// Decodes fields 1-5 as string→message maps; other field numbers fall through
// to `default: break` and are left to the decoder's unknown-field handling.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_PTransform>.self, value: &self.transforms) }()
case 2: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_PCollection>.self, value: &self.pcollections) }()
case 3: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy>.self, value: &self.windowingStrategies) }()
case 4: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_Coder>.self, value: &self.coders) }()
case 5: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_Environment>.self, value: &self.environments) }()
default: break
}
}
}
// Emits only non-empty maps, in ascending field-number order, then any
// unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.transforms.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_PTransform>.self, value: self.transforms, fieldNumber: 1)
}
if !self.pcollections.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_PCollection>.self, value: self.pcollections, fieldNumber: 2)
}
if !self.windowingStrategies.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy>.self, value: self.windowingStrategies, fieldNumber: 3)
}
if !self.coders.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_Coder>.self, value: self.coders, fieldNumber: 4)
}
if !self.environments.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_Environment>.self, value: self.environments, fieldNumber: 5)
}
try unknownFields.traverse(visitor: &visitor)
}
// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Components, rhs: Org_Apache_Beam_Model_Pipeline_V1_Components) -> Bool {
if lhs.transforms != rhs.transforms {return false}
if lhs.pcollections != rhs.pcollections {return false}
if lhs.windowingStrategies != rhs.windowingStrategies {return false}
if lhs.coders != rhs.coders {return false}
if lhs.environments != rhs.environments {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Wire-format support for `Pipeline`: components (singular message, backed by
// optional storage `_components`), root transform ids, display data, requirements.
extension Org_Apache_Beam_Model_Pipeline_V1_Pipeline: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".Pipeline"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "components"),
2: .standard(proto: "root_transform_ids"),
3: .standard(proto: "display_data"),
4: .same(proto: "requirements"),
]
// Decodes fields 1-4; other field numbers are left to the decoder's
// unknown-field handling (`default: break`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._components) }()
case 2: try { try decoder.decodeRepeatedStringField(value: &self.rootTransformIds) }()
case 3: try { try decoder.decodeRepeatedMessageField(value: &self.displayData) }()
case 4: try { try decoder.decodeRepeatedStringField(value: &self.requirements) }()
default: break
}
}
}
// Emits present/non-empty fields in ascending field-number order; the
// `components` message is visited only when `_components` is non-nil.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._components {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if !self.rootTransformIds.isEmpty {
try visitor.visitRepeatedStringField(value: self.rootTransformIds, fieldNumber: 2)
}
if !self.displayData.isEmpty {
try visitor.visitRepeatedMessageField(value: self.displayData, fieldNumber: 3)
}
if !self.requirements.isEmpty {
try visitor.visitRepeatedStringField(value: self.requirements, fieldNumber: 4)
}
try unknownFields.traverse(visitor: &visitor)
}
// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Pipeline, rhs: Org_Apache_Beam_Model_Pipeline_V1_Pipeline) -> Bool {
if lhs._components != rhs._components {return false}
if lhs.rootTransformIds != rhs.rootTransformIds {return false}
if lhs.displayData != rhs.displayData {return false}
if lhs.requirements != rhs.requirements {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Wire-format support for `PTransform`. Note the name map lists field 5
// (unique_name) first, mirroring the .proto declaration order, while
// decode/traverse below handle fields in numeric order.
extension Org_Apache_Beam_Model_Pipeline_V1_PTransform: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".PTransform"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
5: .standard(proto: "unique_name"),
1: .same(proto: "spec"),
2: .same(proto: "subtransforms"),
3: .same(proto: "inputs"),
4: .same(proto: "outputs"),
6: .standard(proto: "display_data"),
7: .standard(proto: "environment_id"),
8: .same(proto: "annotations"),
]
// Decodes fields 1-8; other field numbers are left to the decoder's
// unknown-field handling (`default: break`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._spec) }()
case 2: try { try decoder.decodeRepeatedStringField(value: &self.subtransforms) }()
case 3: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: &self.inputs) }()
case 4: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: &self.outputs) }()
case 5: try { try decoder.decodeSingularStringField(value: &self.uniqueName) }()
case 6: try { try decoder.decodeRepeatedMessageField(value: &self.displayData) }()
case 7: try { try decoder.decodeSingularStringField(value: &self.environmentID) }()
case 8: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufBytes>.self, value: &self.annotations) }()
default: break
}
}
}
// Emits present/non-empty fields in ascending field-number order; `spec` is
// visited only when optional storage `_spec` is non-nil.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._spec {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if !self.subtransforms.isEmpty {
try visitor.visitRepeatedStringField(value: self.subtransforms, fieldNumber: 2)
}
if !self.inputs.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: self.inputs, fieldNumber: 3)
}
if !self.outputs.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: self.outputs, fieldNumber: 4)
}
if !self.uniqueName.isEmpty {
try visitor.visitSingularStringField(value: self.uniqueName, fieldNumber: 5)
}
if !self.displayData.isEmpty {
try visitor.visitRepeatedMessageField(value: self.displayData, fieldNumber: 6)
}
if !self.environmentID.isEmpty {
try visitor.visitSingularStringField(value: self.environmentID, fieldNumber: 7)
}
if !self.annotations.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufBytes>.self, value: self.annotations, fieldNumber: 8)
}
try unknownFields.traverse(visitor: &visitor)
}
// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_PTransform, rhs: Org_Apache_Beam_Model_Pipeline_V1_PTransform) -> Bool {
if lhs.uniqueName != rhs.uniqueName {return false}
if lhs._spec != rhs._spec {return false}
if lhs.subtransforms != rhs.subtransforms {return false}
if lhs.inputs != rhs.inputs {return false}
if lhs.outputs != rhs.outputs {return false}
if lhs.displayData != rhs.displayData {return false}
if lhs.environmentID != rhs.environmentID {return false}
if lhs.annotations != rhs.annotations {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".StandardPTransforms"
  static let _protobuf_nameMap = SwiftProtobuf._NameMap()

  /// `StandardPTransforms` is a field-less namespace message; drain every field
  /// number so the decoder can record the data as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while try decoder.nextFieldNumber() != nil {}
  }

  /// With no declared fields, only `unknownFields` can carry serialized data.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try unknownFields.traverse(visitor: &visitor)
  }

  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms) -> Bool {
    lhs.unknownFields == rhs.unknownFields
  }
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Primitives: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "PAR_DO"),
1: .same(proto: "FLATTEN"),
2: .same(proto: "GROUP_BY_KEY"),
3: .same(proto: "IMPULSE"),
4: .same(proto: "ASSIGN_WINDOWS"),
5: .same(proto: "TEST_STREAM"),
6: .same(proto: "MAP_WINDOWS"),
7: .same(proto: "MERGE_WINDOWS"),
8: .same(proto: "TO_STRING"),
]
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.DeprecatedPrimitives: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "READ"),
1: .same(proto: "CREATE_VIEW"),
]
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.Composites: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "COMBINE_PER_KEY"),
1: .same(proto: "COMBINE_GLOBALLY"),
2: .same(proto: "RESHUFFLE"),
3: .same(proto: "WRITE_FILES"),
4: .same(proto: "PUBSUB_READ"),
5: .same(proto: "PUBSUB_WRITE"),
6: .same(proto: "GROUP_INTO_BATCHES_WITH_SHARDED_KEY"),
7: .same(proto: "PUBSUB_WRITE_V2"),
]
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.CombineComponents: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "COMBINE_PER_KEY_PRECOMBINE"),
1: .same(proto: "COMBINE_PER_KEY_MERGE_ACCUMULATORS"),
2: .same(proto: "COMBINE_PER_KEY_EXTRACT_OUTPUTS"),
3: .same(proto: "COMBINE_GROUPED_VALUES"),
4: .same(proto: "COMBINE_PER_KEY_CONVERT_TO_ACCUMULATORS"),
]
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.SplittableParDoComponents: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "PAIR_WITH_RESTRICTION"),
1: .same(proto: "SPLIT_AND_SIZE_RESTRICTIONS"),
2: .same(proto: "PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS"),
3: .same(proto: "TRUNCATE_SIZED_RESTRICTION"),
]
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardPTransforms.GroupIntoBatchesComponents: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "GROUP_INTO_BATCHES"),
]
}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".StandardSideInputTypes"
  static let _protobuf_nameMap = SwiftProtobuf._NameMap()

  /// `StandardSideInputTypes` is a field-less namespace message; drain every
  /// field number so the decoder can record the data as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while try decoder.nextFieldNumber() != nil {}
  }

  /// With no declared fields, only `unknownFields` can carry serialized data.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try unknownFields.traverse(visitor: &visitor)
  }

  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes) -> Bool {
    lhs.unknownFields == rhs.unknownFields
  }
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardSideInputTypes.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "ITERABLE"),
1: .same(proto: "MULTIMAP"),
]
}
extension Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".StandardUserStateTypes"
  static let _protobuf_nameMap = SwiftProtobuf._NameMap()

  /// `StandardUserStateTypes` is a field-less namespace message; drain every
  /// field number so the decoder can record the data as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while try decoder.nextFieldNumber() != nil {}
  }

  /// With no declared fields, only `unknownFields` can carry serialized data.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try unknownFields.traverse(visitor: &visitor)
  }

  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes) -> Bool {
    lhs.unknownFields == rhs.unknownFields
  }
}
// Raw-value → proto case-name table consumed by the SwiftProtobuf runtime via
// `_ProtoNameProviding`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardUserStateTypes.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "BAG"),
1: .same(proto: "MULTIMAP"),
]
}
// Wire-format support for `PCollection`: unique name, coder/windowing-strategy
// ids, boundedness enum, and display data.
extension Org_Apache_Beam_Model_Pipeline_V1_PCollection: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".PCollection"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "unique_name"),
2: .standard(proto: "coder_id"),
3: .standard(proto: "is_bounded"),
4: .standard(proto: "windowing_strategy_id"),
5: .standard(proto: "display_data"),
]
// Decodes fields 1-5; other field numbers are left to the decoder's
// unknown-field handling (`default: break`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.uniqueName) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.coderID) }()
case 3: try { try decoder.decodeSingularEnumField(value: &self.isBounded) }()
case 4: try { try decoder.decodeSingularStringField(value: &self.windowingStrategyID) }()
case 5: try { try decoder.decodeRepeatedMessageField(value: &self.displayData) }()
default: break
}
}
}
// Emits non-default fields in ascending field-number order; `isBounded` is
// skipped when it is `.unspecified` (the proto3 default value).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.uniqueName.isEmpty {
try visitor.visitSingularStringField(value: self.uniqueName, fieldNumber: 1)
}
if !self.coderID.isEmpty {
try visitor.visitSingularStringField(value: self.coderID, fieldNumber: 2)
}
if self.isBounded != .unspecified {
try visitor.visitSingularEnumField(value: self.isBounded, fieldNumber: 3)
}
if !self.windowingStrategyID.isEmpty {
try visitor.visitSingularStringField(value: self.windowingStrategyID, fieldNumber: 4)
}
if !self.displayData.isEmpty {
try visitor.visitRepeatedMessageField(value: self.displayData, fieldNumber: 5)
}
try unknownFields.traverse(visitor: &visitor)
}
// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_PCollection, rhs: Org_Apache_Beam_Model_Pipeline_V1_PCollection) -> Bool {
if lhs.uniqueName != rhs.uniqueName {return false}
if lhs.coderID != rhs.coderID {return false}
if lhs.isBounded != rhs.isBounded {return false}
if lhs.windowingStrategyID != rhs.windowingStrategyID {return false}
if lhs.displayData != rhs.displayData {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Wire-format support for `ParDoPayload`. The name map lists fields in .proto
// declaration order (9 appears between 4 and 7); decode/traverse handle them in
// numeric field-number order. Field numbers 2, 5, and 6 are absent (reserved or
// removed in the .proto — NOTE(review): not visible from this file).
extension Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".ParDoPayload"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "do_fn"),
3: .standard(proto: "side_inputs"),
4: .standard(proto: "state_specs"),
9: .standard(proto: "timer_family_specs"),
7: .standard(proto: "restriction_coder_id"),
8: .standard(proto: "requests_finalization"),
10: .standard(proto: "requires_time_sorted_input"),
11: .standard(proto: "requires_stable_input"),
12: .standard(proto: "on_window_expiration_timer_family_spec"),
]
// Decodes the declared fields; other field numbers are left to the decoder's
// unknown-field handling (`default: break`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._doFn) }()
case 3: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_SideInput>.self, value: &self.sideInputs) }()
case 4: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_StateSpec>.self, value: &self.stateSpecs) }()
case 7: try { try decoder.decodeSingularStringField(value: &self.restrictionCoderID) }()
case 8: try { try decoder.decodeSingularBoolField(value: &self.requestsFinalization) }()
case 9: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec>.self, value: &self.timerFamilySpecs) }()
case 10: try { try decoder.decodeSingularBoolField(value: &self.requiresTimeSortedInput) }()
case 11: try { try decoder.decodeSingularBoolField(value: &self.requiresStableInput) }()
case 12: try { try decoder.decodeSingularStringField(value: &self.onWindowExpirationTimerFamilySpec) }()
default: break
}
}
}
// Emits non-default fields in ascending field-number order; `doFn` is visited
// only when optional storage `_doFn` is non-nil, and bools only when true.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._doFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if !self.sideInputs.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_SideInput>.self, value: self.sideInputs, fieldNumber: 3)
}
if !self.stateSpecs.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_StateSpec>.self, value: self.stateSpecs, fieldNumber: 4)
}
if !self.restrictionCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.restrictionCoderID, fieldNumber: 7)
}
if self.requestsFinalization != false {
try visitor.visitSingularBoolField(value: self.requestsFinalization, fieldNumber: 8)
}
if !self.timerFamilySpecs.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec>.self, value: self.timerFamilySpecs, fieldNumber: 9)
}
if self.requiresTimeSortedInput != false {
try visitor.visitSingularBoolField(value: self.requiresTimeSortedInput, fieldNumber: 10)
}
if self.requiresStableInput != false {
try visitor.visitSingularBoolField(value: self.requiresStableInput, fieldNumber: 11)
}
if !self.onWindowExpirationTimerFamilySpec.isEmpty {
try visitor.visitSingularStringField(value: self.onWindowExpirationTimerFamilySpec, fieldNumber: 12)
}
try unknownFields.traverse(visitor: &visitor)
}
// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload) -> Bool {
if lhs._doFn != rhs._doFn {return false}
if lhs.sideInputs != rhs.sideInputs {return false}
if lhs.stateSpecs != rhs.stateSpecs {return false}
if lhs.timerFamilySpecs != rhs.timerFamilySpecs {return false}
if lhs.restrictionCoderID != rhs.restrictionCoderID {return false}
if lhs.requestsFinalization != rhs.requestsFinalization {return false}
if lhs.requiresTimeSortedInput != rhs.requiresTimeSortedInput {return false}
if lhs.requiresStableInput != rhs.requiresStableInput {return false}
if lhs.onWindowExpirationTimerFamilySpec != rhs.onWindowExpirationTimerFamilySpec {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
extension Org_Apache_Beam_Model_Pipeline_V1_StateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StateSpec"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "read_modify_write_spec"),
2: .standard(proto: "bag_spec"),
3: .standard(proto: "combining_spec"),
4: .standard(proto: "map_spec"),
5: .standard(proto: "set_spec"),
6: .standard(proto: "ordered_list_spec"),
8: .standard(proto: "multimap_spec"),
7: .same(proto: "protocol"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .readModifyWriteSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .readModifyWriteSpec(v)
}
}()
case 2: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .bagSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .bagSpec(v)
}
}()
case 3: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .combiningSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .combiningSpec(v)
}
}()
case 4: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .mapSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .mapSpec(v)
}
}()
case 5: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .setSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .setSpec(v)
}
}()
case 6: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .orderedListSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .orderedListSpec(v)
}
}()
case 7: try { try decoder.decodeSingularMessageField(value: &self._protocol) }()
case 8: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec?
var hadOneofValue = false
if let current = self.spec {
hadOneofValue = true
if case .multimapSpec(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.spec = .multimapSpec(v)
}
}()
default: break
}
}
}
/// Serializes the populated fields of `StateSpec` in ascending field-number order.
///
/// The `spec` oneof spans fields 1-6 and 8, while field 7 (`protocol`) is an
/// independent singular message. To keep wire output in field-number order,
/// the switch below emits only oneof members 1-6; fields 7 and 8 are emitted
/// afterwards, outside the switch.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
switch self.spec {
case .readModifyWriteSpec?: try {
guard case .readModifyWriteSpec(let v)? = self.spec else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
}()
case .bagSpec?: try {
guard case .bagSpec(let v)? = self.spec else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
}()
case .combiningSpec?: try {
guard case .combiningSpec(let v)? = self.spec else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
}()
case .mapSpec?: try {
guard case .mapSpec(let v)? = self.spec else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
}()
case .setSpec?: try {
guard case .setSpec(let v)? = self.spec else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 5)
}()
case .orderedListSpec?: try {
guard case .orderedListSpec(let v)? = self.spec else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
}()
// `default` (not `case nil`) is intentional: the `.multimapSpec` member
// (field 8) falls through here and is emitted below, after field 7.
default: break
}
// Field 7: optional protocol message, independent of the oneof.
try { if let v = self._protocol {
try visitor.visitSingularMessageField(value: v, fieldNumber: 7)
} }()
// Field 8: the multimapSpec oneof member, emitted last to preserve ordering.
try { if case .multimapSpec(let v)? = self.spec {
try visitor.visitSingularMessageField(value: v, fieldNumber: 8)
} }()
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality over the oneof, the `protocol` field, and unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_StateSpec) -> Bool {
if lhs.spec != rhs.spec {return false}
if lhs._protocol != rhs._protocol {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `ReadModifyWriteStateSpec`
/// (single field: `coder_id` = 1). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".ReadModifyWriteStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.coderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order (proto3: empty string is skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.coderID.isEmpty {
try visitor.visitSingularStringField(value: self.coderID, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_ReadModifyWriteStateSpec) -> Bool {
if lhs.coderID != rhs.coderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `BagStateSpec`
/// (single field: `element_coder_id` = 1). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".BagStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "element_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.elementCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order (proto3: empty string is skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.elementCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.elementCoderID, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_BagStateSpec) -> Bool {
if lhs.elementCoderID != rhs.elementCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `OrderedListStateSpec`
/// (single field: `element_coder_id` = 1). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".OrderedListStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "element_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.elementCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order (proto3: empty string is skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.elementCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.elementCoderID, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_OrderedListStateSpec) -> Bool {
if lhs.elementCoderID != rhs.elementCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `CombiningStateSpec`
/// (`accumulator_coder_id` = 1, `combine_fn` = 2). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".CombiningStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "accumulator_coder_id"),
2: .standard(proto: "combine_fn"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.accumulatorCoderID) }()
// `_combineFn` is the optional backing storage for the `combineFn` message field.
case 2: try { try decoder.decodeSingularMessageField(value: &self._combineFn) }()
default: break
}
}
}
/// Visits non-default fields in field-number order; `combine_fn` is emitted only when set.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
if !self.accumulatorCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.accumulatorCoderID, fieldNumber: 1)
}
try { if let v = self._combineFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
} }()
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_CombiningStateSpec) -> Bool {
if lhs.accumulatorCoderID != rhs.accumulatorCoderID {return false}
if lhs._combineFn != rhs._combineFn {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `MapStateSpec`
/// (`key_coder_id` = 1, `value_coder_id` = 2). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".MapStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "key_coder_id"),
2: .standard(proto: "value_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.keyCoderID) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.valueCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order (proto3: empty strings are skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.keyCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.keyCoderID, fieldNumber: 1)
}
if !self.valueCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.valueCoderID, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_MapStateSpec) -> Bool {
if lhs.keyCoderID != rhs.keyCoderID {return false}
if lhs.valueCoderID != rhs.valueCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `MultimapStateSpec`
/// (`key_coder_id` = 1, `value_coder_id` = 2). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".MultimapStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "key_coder_id"),
2: .standard(proto: "value_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.keyCoderID) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.valueCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order (proto3: empty strings are skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.keyCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.keyCoderID, fieldNumber: 1)
}
if !self.valueCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.valueCoderID, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_MultimapStateSpec) -> Bool {
if lhs.keyCoderID != rhs.keyCoderID {return false}
if lhs.valueCoderID != rhs.valueCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `SetStateSpec`
/// (single field: `element_coder_id` = 1). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".SetStateSpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "element_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.elementCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order (proto3: empty string is skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.elementCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.elementCoderID, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_SetStateSpec) -> Bool {
if lhs.elementCoderID != rhs.elementCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `TimerFamilySpec`
/// (`time_domain` = 1 enum, `timer_family_coder_id` = 2). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".TimerFamilySpec"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "time_domain"),
2: .standard(proto: "timer_family_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularEnumField(value: &self.timeDomain) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.timerFamilyCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order
/// (proto3: `.unspecified` enum value and empty string are skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if self.timeDomain != .unspecified {
try visitor.visitSingularEnumField(value: self.timeDomain, fieldNumber: 1)
}
if !self.timerFamilyCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.timerFamilyCoderID, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_TimerFamilySpec) -> Bool {
if lhs.timeDomain != rhs.timeDomain {return false}
if lhs.timerFamilyCoderID != rhs.timerFamilyCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `IsBounded`, a field-less wrapper
/// message that exists only to namespace its nested `Enum`. Generated code.
extension Org_Apache_Beam_Model_Pipeline_V1_IsBounded: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".IsBounded"
// No fields, so the name map is empty.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
/// Consumes all incoming field numbers; with no known fields, everything
/// is captured as unknown fields by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
/// Nothing to emit except preserved unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
/// Equality reduces to unknown-field equality for this field-less message.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_IsBounded, rhs: Org_Apache_Beam_Model_Pipeline_V1_IsBounded) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// Name metadata for the `IsBounded.Enum` cases, used for text/JSON
/// serialization of the enum values.
extension Org_Apache_Beam_Model_Pipeline_V1_IsBounded.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "UNBOUNDED"),
2: .same(proto: "BOUNDED"),
]
}
/// `SwiftProtobuf.Message` conformance for `ReadPayload`
/// (`source` = 1 message, `is_bounded` = 2 enum). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_ReadPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".ReadPayload"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "source"),
2: .standard(proto: "is_bounded"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
// `_source` is the optional backing storage for the `source` message field.
case 1: try { try decoder.decodeSingularMessageField(value: &self._source) }()
case 2: try { try decoder.decodeSingularEnumField(value: &self.isBounded) }()
default: break
}
}
}
/// Visits non-default fields in field-number order; `source` is emitted only
/// when set, and the `.unspecified` enum value is skipped per proto3 rules.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._source {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if self.isBounded != .unspecified {
try visitor.visitSingularEnumField(value: self.isBounded, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ReadPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ReadPayload) -> Bool {
if lhs._source != rhs._source {return false}
if lhs.isBounded != rhs.isBounded {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `WindowIntoPayload`
/// (single field: `window_fn` = 1 message). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".WindowIntoPayload"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "window_fn"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
// `_windowFn` is the optional backing storage for the `windowFn` message field.
case 1: try { try decoder.decodeSingularMessageField(value: &self._windowFn) }()
default: break
}
}
}
/// Emits `window_fn` only when it has been set, then any unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._windowFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload) -> Bool {
if lhs._windowFn != rhs._windowFn {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `CombinePayload`
/// (`combine_fn` = 1 message, `accumulator_coder_id` = 2). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_CombinePayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".CombinePayload"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "combine_fn"),
2: .standard(proto: "accumulator_coder_id"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
// `_combineFn` is the optional backing storage for the `combineFn` message field.
case 1: try { try decoder.decodeSingularMessageField(value: &self._combineFn) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.accumulatorCoderID) }()
default: break
}
}
}
/// Visits non-default fields in field-number order; `combine_fn` is emitted
/// only when set, and an empty accumulator coder id is skipped per proto3 rules.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._combineFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if !self.accumulatorCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.accumulatorCoderID, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_CombinePayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_CombinePayload) -> Bool {
if lhs._combineFn != rhs._combineFn {return false}
if lhs.accumulatorCoderID != rhs.accumulatorCoderID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `TestStreamPayload`
/// (`coder_id` = 1, repeated `events` = 2, `endpoint` = 3 message). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".TestStreamPayload"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "coder_id"),
2: .same(proto: "events"),
3: .same(proto: "endpoint"),
]
/// Decodes known fields into `self`; repeated `events` entries are appended.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.coderID) }()
case 2: try { try decoder.decodeRepeatedMessageField(value: &self.events) }()
// `_endpoint` is the optional backing storage for the `endpoint` message field.
case 3: try { try decoder.decodeSingularMessageField(value: &self._endpoint) }()
default: break
}
}
}
/// Visits non-default fields in field-number order; empty string/array values
/// are skipped per proto3 rules and `endpoint` is emitted only when set.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
if !self.coderID.isEmpty {
try visitor.visitSingularStringField(value: self.coderID, fieldNumber: 1)
}
if !self.events.isEmpty {
try visitor.visitRepeatedMessageField(value: self.events, fieldNumber: 2)
}
try { if let v = self._endpoint {
try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
} }()
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload) -> Bool {
if lhs.coderID != rhs.coderID {return false}
if lhs.events != rhs.events {return false}
if lhs._endpoint != rhs._endpoint {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `TestStreamPayload.Event`, whose
/// three fields (`watermark_event` = 1, `processing_time_event` = 2,
/// `element_event` = 3) form a single `event` oneof. Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.protoMessageName + ".Event"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "watermark_event"),
2: .standard(proto: "processing_time_event"),
3: .standard(proto: "element_event"),
]
/// Decodes known fields into `self`. Each oneof case first preserves the
/// current value (so a message split across records merges correctly), then
/// reports a conflict if a *different* oneof member was already set.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark?
var hadOneofValue = false
if let current = self.event {
hadOneofValue = true
// Seed with the existing value so repeated occurrences merge.
if case .watermarkEvent(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.event = .watermarkEvent(v)
}
}()
case 2: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime?
var hadOneofValue = false
if let current = self.event {
hadOneofValue = true
// Seed with the existing value so repeated occurrences merge.
if case .processingTimeEvent(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.event = .processingTimeEvent(v)
}
}()
case 3: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements?
var hadOneofValue = false
if let current = self.event {
hadOneofValue = true
// Seed with the existing value so repeated occurrences merge.
if case .elementEvent(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.event = .elementEvent(v)
}
}()
default: break
}
}
}
/// Emits whichever `event` oneof member is set (if any), then unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
switch self.event {
case .watermarkEvent?: try {
guard case .watermarkEvent(let v)? = self.event else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
}()
case .processingTimeEvent?: try {
guard case .processingTimeEvent(let v)? = self.event else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
}()
case .elementEvent?: try {
guard case .elementEvent(let v)? = self.event else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
}()
case nil: break
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality over the oneof and unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event) -> Bool {
if lhs.event != rhs.event {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `Event.AdvanceWatermark`
/// (`new_watermark` = 1 int64, `tag` = 2). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.protoMessageName + ".AdvanceWatermark"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "new_watermark"),
2: .same(proto: "tag"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularInt64Field(value: &self.newWatermark) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.tag) }()
default: break
}
}
}
/// Visits non-default fields in field-number order
/// (proto3: zero int and empty string are skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if self.newWatermark != 0 {
try visitor.visitSingularInt64Field(value: self.newWatermark, fieldNumber: 1)
}
if !self.tag.isEmpty {
try visitor.visitSingularStringField(value: self.tag, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceWatermark) -> Bool {
if lhs.newWatermark != rhs.newWatermark {return false}
if lhs.tag != rhs.tag {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `Event.AdvanceProcessingTime`
/// (single field: `advance_duration` = 1 int64). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.protoMessageName + ".AdvanceProcessingTime"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "advance_duration"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularInt64Field(value: &self.advanceDuration) }()
default: break
}
}
}
/// Visits non-default fields (proto3: zero int is skipped), then unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if self.advanceDuration != 0 {
try visitor.visitSingularInt64Field(value: self.advanceDuration, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AdvanceProcessingTime) -> Bool {
if lhs.advanceDuration != rhs.advanceDuration {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `Event.AddElements`
/// (repeated `elements` = 1, `tag` = 3; field 2 is unused in the schema).
/// Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.protoMessageName + ".AddElements"
// Maps proto field numbers to their wire/JSON names. Note the gap at 2.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "elements"),
3: .same(proto: "tag"),
]
/// Decodes known fields into `self`; repeated `elements` entries are appended.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeRepeatedMessageField(value: &self.elements) }()
case 3: try { try decoder.decodeSingularStringField(value: &self.tag) }()
default: break
}
}
}
/// Visits non-default fields in field-number order
/// (proto3: empty array and empty string are skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.elements.isEmpty {
try visitor.visitRepeatedMessageField(value: self.elements, fieldNumber: 1)
}
if !self.tag.isEmpty {
try visitor.visitSingularStringField(value: self.tag, fieldNumber: 3)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.Event.AddElements) -> Bool {
if lhs.elements != rhs.elements {return false}
if lhs.tag != rhs.tag {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `TestStreamPayload.TimestampedElement`
/// (`encoded_element` = 1 bytes, `timestamp` = 2 int64). Generated code — regenerate rather than hand-edit.
extension Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.TimestampedElement: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.protoMessageName + ".TimestampedElement"
// Maps proto field numbers to their wire/JSON names.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "encoded_element"),
2: .same(proto: "timestamp"),
]
/// Decodes known fields into `self`; unrecognized fields are retained by the runtime.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularBytesField(value: &self.encodedElement) }()
case 2: try { try decoder.decodeSingularInt64Field(value: &self.timestamp) }()
default: break
}
}
}
/// Visits non-default fields in field-number order
/// (proto3: empty bytes and zero int are skipped).
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.encodedElement.isEmpty {
try visitor.visitSingularBytesField(value: self.encodedElement, fieldNumber: 1)
}
if self.timestamp != 0 {
try visitor.visitSingularInt64Field(value: self.timestamp, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-wise equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.TimestampedElement, rhs: Org_Apache_Beam_Model_Pipeline_V1_TestStreamPayload.TimestampedElement) -> Bool {
if lhs.encodedElement != rhs.encodedElement {return false}
if lhs.timestamp != rhs.timestamp {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated SwiftProtobuf.Message conformance for EventsRequest, which
// carries only a repeated list of output ids (field 1).
extension Org_Apache_Beam_Model_Pipeline_V1_EventsRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".EventsRequest"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "output_ids"),
]
/// Decodes all fields from `decoder`; unrecognized fields hit `default`.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeRepeatedStringField(value: &self.outputIds) }()
default: break
}
}
}
/// Serializes `outputIds` when non-empty, then any unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.outputIds.isEmpty {
try visitor.visitRepeatedStringField(value: self.outputIds, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_EventsRequest, rhs: Org_Apache_Beam_Model_Pipeline_V1_EventsRequest) -> Bool {
if lhs.outputIds != rhs.outputIds {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated SwiftProtobuf.Message conformance for WriteFilesPayload:
// sink/format-function sub-messages, two bool flags, and a string-keyed
// map of side inputs.
extension Org_Apache_Beam_Model_Pipeline_V1_WriteFilesPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".WriteFilesPayload"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "sink"),
2: .standard(proto: "format_function"),
3: .standard(proto: "windowed_writes"),
4: .standard(proto: "runner_determined_sharding"),
5: .standard(proto: "side_inputs"),
]
/// Decodes all fields from `decoder`. Message-typed fields (1, 2) decode
/// into their optional `_sink` / `_formatFunction` backing storage; field 5
/// decodes into the string -> SideInput map.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._sink) }()
case 2: try { try decoder.decodeSingularMessageField(value: &self._formatFunction) }()
case 3: try { try decoder.decodeSingularBoolField(value: &self.windowedWrites) }()
case 4: try { try decoder.decodeSingularBoolField(value: &self.runnerDeterminedSharding) }()
case 5: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_SideInput>.self, value: &self.sideInputs) }()
default: break
}
}
}
/// Serializes set fields in field-number order; message fields are emitted
/// only when their optional backing storage is non-nil, scalars only when
/// not at their proto3 default.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._sink {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
try { if let v = self._formatFunction {
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
} }()
if self.windowedWrites != false {
try visitor.visitSingularBoolField(value: self.windowedWrites, fieldNumber: 3)
}
if self.runnerDeterminedSharding != false {
try visitor.visitSingularBoolField(value: self.runnerDeterminedSharding, fieldNumber: 4)
}
if !self.sideInputs.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMessageMap<SwiftProtobuf.ProtobufString,Org_Apache_Beam_Model_Pipeline_V1_SideInput>.self, value: self.sideInputs, fieldNumber: 5)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields. Comparing the
/// optional `_sink` / `_formatFunction` storage also distinguishes
/// "unset" from "set to default".
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_WriteFilesPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_WriteFilesPayload) -> Bool {
if lhs._sink != rhs._sink {return false}
if lhs._formatFunction != rhs._formatFunction {return false}
if lhs.windowedWrites != rhs.windowedWrites {return false}
if lhs.runnerDeterminedSharding != rhs.runnerDeterminedSharding {return false}
if lhs.sideInputs != rhs.sideInputs {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated SwiftProtobuf.Message conformance for PubSubReadPayload:
// six string fields plus one bool, all scalar proto3 fields.
extension Org_Apache_Beam_Model_Pipeline_V1_PubSubReadPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".PubSubReadPayload"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "topic"),
2: .same(proto: "subscription"),
3: .standard(proto: "timestamp_attribute"),
4: .standard(proto: "id_attribute"),
5: .standard(proto: "with_attributes"),
6: .standard(proto: "topic_runtime_overridden"),
7: .standard(proto: "subscription_runtime_overridden"),
]
/// Decodes all fields from `decoder`; unrecognized fields hit `default`.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.topic) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.subscription) }()
case 3: try { try decoder.decodeSingularStringField(value: &self.timestampAttribute) }()
case 4: try { try decoder.decodeSingularStringField(value: &self.idAttribute) }()
case 5: try { try decoder.decodeSingularBoolField(value: &self.withAttributes) }()
case 6: try { try decoder.decodeSingularStringField(value: &self.topicRuntimeOverridden) }()
case 7: try { try decoder.decodeSingularStringField(value: &self.subscriptionRuntimeOverridden) }()
default: break
}
}
}
/// Serializes set fields in field-number order, skipping proto3 defaults
/// (empty strings, false); unknown fields are written last.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.topic.isEmpty {
try visitor.visitSingularStringField(value: self.topic, fieldNumber: 1)
}
if !self.subscription.isEmpty {
try visitor.visitSingularStringField(value: self.subscription, fieldNumber: 2)
}
if !self.timestampAttribute.isEmpty {
try visitor.visitSingularStringField(value: self.timestampAttribute, fieldNumber: 3)
}
if !self.idAttribute.isEmpty {
try visitor.visitSingularStringField(value: self.idAttribute, fieldNumber: 4)
}
if self.withAttributes != false {
try visitor.visitSingularBoolField(value: self.withAttributes, fieldNumber: 5)
}
if !self.topicRuntimeOverridden.isEmpty {
try visitor.visitSingularStringField(value: self.topicRuntimeOverridden, fieldNumber: 6)
}
if !self.subscriptionRuntimeOverridden.isEmpty {
try visitor.visitSingularStringField(value: self.subscriptionRuntimeOverridden, fieldNumber: 7)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_PubSubReadPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_PubSubReadPayload) -> Bool {
if lhs.topic != rhs.topic {return false}
if lhs.subscription != rhs.subscription {return false}
if lhs.timestampAttribute != rhs.timestampAttribute {return false}
if lhs.idAttribute != rhs.idAttribute {return false}
if lhs.withAttributes != rhs.withAttributes {return false}
if lhs.topicRuntimeOverridden != rhs.topicRuntimeOverridden {return false}
if lhs.subscriptionRuntimeOverridden != rhs.subscriptionRuntimeOverridden {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated SwiftProtobuf.Message conformance for PubSubWritePayload:
// four scalar string fields.
extension Org_Apache_Beam_Model_Pipeline_V1_PubSubWritePayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".PubSubWritePayload"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "topic"),
2: .standard(proto: "timestamp_attribute"),
3: .standard(proto: "id_attribute"),
4: .standard(proto: "topic_runtime_overridden"),
]
/// Decodes all fields from `decoder`; unrecognized fields hit `default`.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.topic) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.timestampAttribute) }()
case 3: try { try decoder.decodeSingularStringField(value: &self.idAttribute) }()
case 4: try { try decoder.decodeSingularStringField(value: &self.topicRuntimeOverridden) }()
default: break
}
}
}
/// Serializes non-empty string fields in field-number order, then any
/// unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.topic.isEmpty {
try visitor.visitSingularStringField(value: self.topic, fieldNumber: 1)
}
if !self.timestampAttribute.isEmpty {
try visitor.visitSingularStringField(value: self.timestampAttribute, fieldNumber: 2)
}
if !self.idAttribute.isEmpty {
try visitor.visitSingularStringField(value: self.idAttribute, fieldNumber: 3)
}
if !self.topicRuntimeOverridden.isEmpty {
try visitor.visitSingularStringField(value: self.topicRuntimeOverridden, fieldNumber: 4)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_PubSubWritePayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_PubSubWritePayload) -> Bool {
if lhs.topic != rhs.topic {return false}
if lhs.timestampAttribute != rhs.timestampAttribute {return false}
if lhs.idAttribute != rhs.idAttribute {return false}
if lhs.topicRuntimeOverridden != rhs.topicRuntimeOverridden {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated SwiftProtobuf.Message conformance for GroupIntoBatchesPayload:
// three int64 scalar fields. The name map lists 1, 3, 2 — this mirrors the
// declaration order in the .proto file and is intentional, not a typo.
extension Org_Apache_Beam_Model_Pipeline_V1_GroupIntoBatchesPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".GroupIntoBatchesPayload"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "batch_size"),
3: .standard(proto: "batch_size_bytes"),
2: .standard(proto: "max_buffering_duration_millis"),
]
/// Decodes all fields from `decoder`; unrecognized fields hit `default`.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularInt64Field(value: &self.batchSize) }()
case 2: try { try decoder.decodeSingularInt64Field(value: &self.maxBufferingDurationMillis) }()
case 3: try { try decoder.decodeSingularInt64Field(value: &self.batchSizeBytes) }()
default: break
}
}
}
/// Serializes non-zero int64 fields in field-number order, then any
/// unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if self.batchSize != 0 {
try visitor.visitSingularInt64Field(value: self.batchSize, fieldNumber: 1)
}
if self.maxBufferingDurationMillis != 0 {
try visitor.visitSingularInt64Field(value: self.maxBufferingDurationMillis, fieldNumber: 2)
}
if self.batchSizeBytes != 0 {
try visitor.visitSingularInt64Field(value: self.batchSizeBytes, fieldNumber: 3)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_GroupIntoBatchesPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_GroupIntoBatchesPayload) -> Bool {
if lhs.batchSize != rhs.batchSize {return false}
if lhs.batchSizeBytes != rhs.batchSizeBytes {return false}
if lhs.maxBufferingDurationMillis != rhs.maxBufferingDurationMillis {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated SwiftProtobuf.Message conformance for Coder: an optional spec
// sub-message plus a repeated list of component coder ids.
extension Org_Apache_Beam_Model_Pipeline_V1_Coder: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".Coder"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "spec"),
2: .standard(proto: "component_coder_ids"),
]
/// Decodes all fields from `decoder`; field 1 fills the optional `_spec`
/// backing storage so presence is tracked.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._spec) }()
case 2: try { try decoder.decodeRepeatedStringField(value: &self.componentCoderIds) }()
default: break
}
}
}
/// Serializes `spec` only when present and `componentCoderIds` only when
/// non-empty, then any unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._spec {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if !self.componentCoderIds.isEmpty {
try visitor.visitRepeatedStringField(value: self.componentCoderIds, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields; comparing `_spec`
/// distinguishes "unset" from "set to default".
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Coder, rhs: Org_Apache_Beam_Model_Pipeline_V1_Coder) -> Bool {
if lhs._spec != rhs._spec {return false}
if lhs.componentCoderIds != rhs.componentCoderIds {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated conformance for StandardCoders: a message with no fields that
// exists to namespace its nested `Enum`. Decoding simply drains the
// decoder; every field number lands in `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardCoders: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardCoders"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardCoders, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardCoders) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for StandardCoders.Enum,
// enumerating Beam's well-known coder URN kinds.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardCoders.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "BYTES"),
1: .same(proto: "KV"),
2: .same(proto: "VARINT"),
3: .same(proto: "ITERABLE"),
4: .same(proto: "TIMER"),
5: .same(proto: "INTERVAL_WINDOW"),
6: .same(proto: "LENGTH_PREFIX"),
7: .same(proto: "GLOBAL_WINDOW"),
8: .same(proto: "WINDOWED_VALUE"),
9: .same(proto: "STATE_BACKED_ITERABLE"),
10: .same(proto: "STRING_UTF8"),
11: .same(proto: "DOUBLE"),
12: .same(proto: "BOOL"),
13: .same(proto: "ROW"),
14: .same(proto: "PARAM_WINDOWED_VALUE"),
15: .same(proto: "SHARDED_KEY"),
16: .same(proto: "CUSTOM_WINDOW"),
17: .same(proto: "NULLABLE"),
]
}
// Generated SwiftProtobuf.Message conformance for WindowingStrategy: the
// full windowing description (window fn, trigger, merge/accumulation/
// output-time/closing/on-time enums, lateness, and environment id).
extension Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".WindowingStrategy"
// Field number -> proto field name mapping (used for JSON/text formats).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "window_fn"),
2: .standard(proto: "merge_status"),
3: .standard(proto: "window_coder_id"),
4: .same(proto: "trigger"),
5: .standard(proto: "accumulation_mode"),
6: .standard(proto: "output_time"),
7: .standard(proto: "closing_behavior"),
8: .standard(proto: "allowed_lateness"),
9: .standard(proto: "on_time_behavior"),
10: .standard(proto: "assigns_to_one_window"),
11: .standard(proto: "environment_id"),
]
/// Decodes all fields from `decoder`. Message fields 1 and 4 fill their
/// optional `_windowFn` / `_trigger` backing storage; enum fields decode
/// into their typed enum properties.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._windowFn) }()
case 2: try { try decoder.decodeSingularEnumField(value: &self.mergeStatus) }()
case 3: try { try decoder.decodeSingularStringField(value: &self.windowCoderID) }()
case 4: try { try decoder.decodeSingularMessageField(value: &self._trigger) }()
case 5: try { try decoder.decodeSingularEnumField(value: &self.accumulationMode) }()
case 6: try { try decoder.decodeSingularEnumField(value: &self.outputTime) }()
case 7: try { try decoder.decodeSingularEnumField(value: &self.closingBehavior) }()
case 8: try { try decoder.decodeSingularInt64Field(value: &self.allowedLateness) }()
case 9: try { try decoder.decodeSingularEnumField(value: &self.onTimeBehavior) }()
case 10: try { try decoder.decodeSingularBoolField(value: &self.assignsToOneWindow) }()
case 11: try { try decoder.decodeSingularStringField(value: &self.environmentID) }()
default: break
}
}
}
/// Serializes set fields in field-number order. Enum fields are skipped at
/// their zero value (`.unspecified`), scalars at their proto3 defaults, and
/// message fields when their optional storage is nil.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._windowFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if self.mergeStatus != .unspecified {
try visitor.visitSingularEnumField(value: self.mergeStatus, fieldNumber: 2)
}
if !self.windowCoderID.isEmpty {
try visitor.visitSingularStringField(value: self.windowCoderID, fieldNumber: 3)
}
try { if let v = self._trigger {
try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
} }()
if self.accumulationMode != .unspecified {
try visitor.visitSingularEnumField(value: self.accumulationMode, fieldNumber: 5)
}
if self.outputTime != .unspecified {
try visitor.visitSingularEnumField(value: self.outputTime, fieldNumber: 6)
}
if self.closingBehavior != .unspecified {
try visitor.visitSingularEnumField(value: self.closingBehavior, fieldNumber: 7)
}
if self.allowedLateness != 0 {
try visitor.visitSingularInt64Field(value: self.allowedLateness, fieldNumber: 8)
}
if self.onTimeBehavior != .unspecified {
try visitor.visitSingularEnumField(value: self.onTimeBehavior, fieldNumber: 9)
}
if self.assignsToOneWindow != false {
try visitor.visitSingularBoolField(value: self.assignsToOneWindow, fieldNumber: 10)
}
if !self.environmentID.isEmpty {
try visitor.visitSingularStringField(value: self.environmentID, fieldNumber: 11)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy, rhs: Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy) -> Bool {
if lhs._windowFn != rhs._windowFn {return false}
if lhs.mergeStatus != rhs.mergeStatus {return false}
if lhs.windowCoderID != rhs.windowCoderID {return false}
if lhs._trigger != rhs._trigger {return false}
if lhs.accumulationMode != rhs.accumulationMode {return false}
if lhs.outputTime != rhs.outputTime {return false}
if lhs.closingBehavior != rhs.closingBehavior {return false}
if lhs.allowedLateness != rhs.allowedLateness {return false}
if lhs.onTimeBehavior != rhs.onTimeBehavior {return false}
if lhs.assignsToOneWindow != rhs.assignsToOneWindow {return false}
if lhs.environmentID != rhs.environmentID {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated conformance for MergeStatus: a fieldless wrapper message that
// namespaces its nested `Enum`; decoding drains all input into
// `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_MergeStatus: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".MergeStatus"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_MergeStatus, rhs: Org_Apache_Beam_Model_Pipeline_V1_MergeStatus) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for MergeStatus.Enum.
extension Org_Apache_Beam_Model_Pipeline_V1_MergeStatus.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "NON_MERGING"),
2: .same(proto: "NEEDS_MERGE"),
3: .same(proto: "ALREADY_MERGED"),
]
}
// Generated conformance for AccumulationMode: a fieldless wrapper message
// that namespaces its nested `Enum`; decoding drains all input into
// `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".AccumulationMode"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode, rhs: Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for AccumulationMode.Enum.
extension Org_Apache_Beam_Model_Pipeline_V1_AccumulationMode.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "DISCARDING"),
2: .same(proto: "ACCUMULATING"),
3: .same(proto: "RETRACTING"),
]
}
// Generated conformance for ClosingBehavior: a fieldless wrapper message
// that namespaces its nested `Enum`; decoding drains all input into
// `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".ClosingBehavior"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior, rhs: Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for ClosingBehavior.Enum.
extension Org_Apache_Beam_Model_Pipeline_V1_ClosingBehavior.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "EMIT_ALWAYS"),
2: .same(proto: "EMIT_IF_NONEMPTY"),
]
}
// Generated conformance for OnTimeBehavior: a fieldless wrapper message
// that namespaces its nested `Enum`; decoding drains all input into
// `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".OnTimeBehavior"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior, rhs: Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for OnTimeBehavior.Enum.
extension Org_Apache_Beam_Model_Pipeline_V1_OnTimeBehavior.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "FIRE_ALWAYS"),
2: .same(proto: "FIRE_IF_NONEMPTY"),
]
}
// Generated conformance for OutputTime: a fieldless wrapper message that
// namespaces its nested `Enum`; decoding drains all input into
// `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_OutputTime: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".OutputTime"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_OutputTime, rhs: Org_Apache_Beam_Model_Pipeline_V1_OutputTime) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for OutputTime.Enum.
extension Org_Apache_Beam_Model_Pipeline_V1_OutputTime.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "END_OF_WINDOW"),
2: .same(proto: "LATEST_IN_PANE"),
3: .same(proto: "EARLIEST_IN_PANE"),
]
}
// Generated conformance for TimeDomain: a fieldless wrapper message that
// namespaces its nested `Enum`; decoding drains all input into
// `unknownFields`.
extension Org_Apache_Beam_Model_Pipeline_V1_TimeDomain: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".TimeDomain"
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TimeDomain, rhs: Org_Apache_Beam_Model_Pipeline_V1_TimeDomain) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Case number -> proto case name mapping for TimeDomain.Enum.
extension Org_Apache_Beam_Model_Pipeline_V1_TimeDomain.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "UNSPECIFIED"),
1: .same(proto: "EVENT_TIME"),
2: .same(proto: "PROCESSING_TIME"),
]
}
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".Trigger"
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "after_all"),
2: .standard(proto: "after_any"),
3: .standard(proto: "after_each"),
4: .standard(proto: "after_end_of_window"),
5: .standard(proto: "after_processing_time"),
6: .standard(proto: "after_synchronized_processing_time"),
12: .same(proto: "always"),
7: .same(proto: "default"),
8: .standard(proto: "element_count"),
9: .same(proto: "never"),
10: .standard(proto: "or_finally"),
11: .same(proto: "repeat"),
]
fileprivate class _StorageClass {
var _trigger: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OneOf_Trigger?
static let defaultInstance = _StorageClass()
private init() {}
init(copying source: _StorageClass) {
_trigger = source._trigger
}
}
fileprivate mutating func _uniqueStorage() -> _StorageClass {
if !isKnownUniquelyReferenced(&_storage) {
_storage = _StorageClass(copying: _storage)
}
return _storage
}
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
_ = _uniqueStorage()
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .afterAll(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .afterAll(v)
}
}()
case 2: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .afterAny(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .afterAny(v)
}
}()
case 3: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .afterEach(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .afterEach(v)
}
}()
case 4: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .afterEndOfWindow(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .afterEndOfWindow(v)
}
}()
case 5: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .afterProcessingTime(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .afterProcessingTime(v)
}
}()
case 6: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .afterSynchronizedProcessingTime(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .afterSynchronizedProcessingTime(v)
}
}()
case 7: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .default(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .default(v)
}
}()
case 8: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .elementCount(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .elementCount(v)
}
}()
case 9: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .never(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .never(v)
}
}()
case 10: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .orFinally(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .orFinally(v)
}
}()
case 11: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .repeat(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .repeat(v)
}
}()
case 12: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always?
var hadOneofValue = false
if let current = _storage._trigger {
hadOneofValue = true
if case .always(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
_storage._trigger = .always(v)
}
}()
default: break
}
}
}
}
// Serializes this Trigger by visiting whichever `trigger` oneof case is
// currently set (field numbers 1-12 match the .proto declaration), then
// any unknown fields that were preserved during decoding.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
switch _storage._trigger {
// Each branch re-extracts the associated value inside its closure; the
// outer switch already matched the case, so the guard cannot fail.
case .afterAll?: try {
guard case .afterAll(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
}()
case .afterAny?: try {
guard case .afterAny(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
}()
case .afterEach?: try {
guard case .afterEach(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
}()
case .afterEndOfWindow?: try {
guard case .afterEndOfWindow(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
}()
case .afterProcessingTime?: try {
guard case .afterProcessingTime(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 5)
}()
case .afterSynchronizedProcessingTime?: try {
guard case .afterSynchronizedProcessingTime(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
}()
case .default?: try {
guard case .default(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 7)
}()
case .elementCount?: try {
guard case .elementCount(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 8)
}()
case .never?: try {
guard case .never(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 9)
}()
case .orFinally?: try {
guard case .orFinally(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 10)
}()
case .repeat?: try {
guard case .repeat(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 11)
}()
case .always?: try {
guard case .always(let v)? = _storage._trigger else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 12)
}()
// No oneof case set: nothing to emit for this field group.
case nil: break
}
}
try unknownFields.traverse(visitor: &visitor)
}
// Equality for Trigger: skips the field-by-field comparison when both
// values share the same storage object (copy-on-write fast path), then
// compares the oneof payload and any preserved unknown fields.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger) -> Bool {
if lhs._storage !== rhs._storage {
let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
let _storage = _args.0
let rhs_storage = _args.1
if _storage._trigger != rhs_storage._trigger {return false}
return true
}
if !storagesAreEqual {return false}
}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for Trigger.AfterAll:
// fires when all of its subtriggers have fired.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".AfterAll"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "subtriggers"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeRepeatedMessageField(value: &self.subtriggers) }()
// Unrecognized fields are retained by the decoder as unknownFields.
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: an empty repeated field is not serialized.
if !self.subtriggers.isEmpty {
try visitor.visitRepeatedMessageField(value: self.subtriggers, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAll) -> Bool {
if lhs.subtriggers != rhs.subtriggers {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for Trigger.AfterAny:
// fires when any of its subtriggers fires.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".AfterAny"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "subtriggers"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeRepeatedMessageField(value: &self.subtriggers) }()
// Unrecognized fields are retained by the decoder as unknownFields.
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: an empty repeated field is not serialized.
if !self.subtriggers.isEmpty {
try visitor.visitRepeatedMessageField(value: self.subtriggers, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterAny) -> Bool {
if lhs.subtriggers != rhs.subtriggers {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for Trigger.AfterEach:
// fires its subtriggers in sequence.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".AfterEach"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "subtriggers"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeRepeatedMessageField(value: &self.subtriggers) }()
// Unrecognized fields are retained by the decoder as unknownFields.
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: an empty repeated field is not serialized.
if !self.subtriggers.isEmpty {
try visitor.visitRepeatedMessageField(value: self.subtriggers, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEach) -> Bool {
if lhs.subtriggers != rhs.subtriggers {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for Trigger.AfterEndOfWindow, which holds two optional
// recursive Trigger submessages (early_firings, late_firings). Because the
// message is recursive, its fields live in a heap-allocated _StorageClass
// with copy-on-write semantics rather than inline in the struct.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".AfterEndOfWindow"
// Field number -> proto field name (snake_case on the wire, camelCase in Swift).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "early_firings"),
2: .standard(proto: "late_firings"),
]
// Reference-typed backing store enabling copy-on-write for this value type.
fileprivate class _StorageClass {
var _earlyFirings: Org_Apache_Beam_Model_Pipeline_V1_Trigger? = nil
var _lateFirings: Org_Apache_Beam_Model_Pipeline_V1_Trigger? = nil
// Shared storage for default-initialized messages; never mutated.
static let defaultInstance = _StorageClass()
private init() {}
init(copying source: _StorageClass) {
_earlyFirings = source._earlyFirings
_lateFirings = source._lateFirings
}
}
// Clones the backing store if it is shared, so mutation stays value-semantic.
fileprivate mutating func _uniqueStorage() -> _StorageClass {
if !isKnownUniquelyReferenced(&_storage) {
_storage = _StorageClass(copying: _storage)
}
return _storage
}
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
_ = _uniqueStorage()
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &_storage._earlyFirings) }()
case 2: try { try decoder.decodeSingularMessageField(value: &_storage._lateFirings) }()
default: break
}
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = _storage._earlyFirings {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
try { if let v = _storage._lateFirings {
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
} }()
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterEndOfWindow) -> Bool {
// Identical storage reference implies equal field values (COW fast path).
if lhs._storage !== rhs._storage {
let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
let _storage = _args.0
let rhs_storage = _args.1
if _storage._earlyFirings != rhs_storage._earlyFirings {return false}
if _storage._lateFirings != rhs_storage._lateFirings {return false}
return true
}
if !storagesAreEqual {return false}
}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for Trigger.AfterProcessingTime,
// carrying a repeated list of TimestampTransform messages.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".AfterProcessingTime"
// Field number -> proto field name (snake_case on the wire).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "timestamp_transforms"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeRepeatedMessageField(value: &self.timestampTransforms) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: an empty repeated field is not serialized.
if !self.timestampTransforms.isEmpty {
try visitor.visitRepeatedMessageField(value: self.timestampTransforms, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterProcessingTime) -> Bool {
if lhs.timestampTransforms != rhs.timestampTransforms {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for the field-less Trigger.AfterSynchronizedProcessingTime
// marker message: decoding just drains (and preserves) unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".AfterSynchronizedProcessingTime"
// No declared fields, so the name map is empty.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.AfterSynchronizedProcessingTime) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for the field-less Trigger.Default marker message:
// decoding just drains (and preserves) unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".Default"
// No declared fields, so the name map is empty.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Default) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for Trigger.ElementCount,
// which carries a single int32 threshold field.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".ElementCount"
// Field number -> proto field name (snake_case on the wire).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "element_count"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularInt32Field(value: &self.elementCount) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: the zero default is not serialized.
if self.elementCount != 0 {
try visitor.visitSingularInt32Field(value: self.elementCount, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.ElementCount) -> Bool {
if lhs.elementCount != rhs.elementCount {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for the field-less Trigger.Never marker message:
// decoding just drains (and preserves) unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".Never"
// No declared fields, so the name map is empty.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Never) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for the field-less Trigger.Always marker message:
// decoding just drains (and preserves) unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".Always"
// No declared fields, so the name map is empty.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Always) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for Trigger.OrFinally, which holds two optional
// recursive Trigger submessages (main, finally). The recursion forces the
// fields into a heap-allocated _StorageClass with copy-on-write semantics.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".OrFinally"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "main"),
2: .same(proto: "finally"),
]
// Reference-typed backing store enabling copy-on-write for this value type.
fileprivate class _StorageClass {
var _main: Org_Apache_Beam_Model_Pipeline_V1_Trigger? = nil
var _finally: Org_Apache_Beam_Model_Pipeline_V1_Trigger? = nil
// Shared storage for default-initialized messages; never mutated.
static let defaultInstance = _StorageClass()
private init() {}
init(copying source: _StorageClass) {
_main = source._main
_finally = source._finally
}
}
// Clones the backing store if it is shared, so mutation stays value-semantic.
fileprivate mutating func _uniqueStorage() -> _StorageClass {
if !isKnownUniquelyReferenced(&_storage) {
_storage = _StorageClass(copying: _storage)
}
return _storage
}
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
_ = _uniqueStorage()
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &_storage._main) }()
case 2: try { try decoder.decodeSingularMessageField(value: &_storage._finally) }()
default: break
}
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = _storage._main {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
try { if let v = _storage._finally {
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
} }()
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.OrFinally) -> Bool {
// Identical storage reference implies equal field values (COW fast path).
if lhs._storage !== rhs._storage {
let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
let _storage = _args.0
let rhs_storage = _args.1
if _storage._main != rhs_storage._main {return false}
if _storage._finally != rhs_storage._finally {return false}
return true
}
if !storagesAreEqual {return false}
}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for Trigger.Repeat, which holds one optional recursive
// Trigger submessage (subtrigger) behind a copy-on-write _StorageClass.
extension Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_Trigger.protoMessageName + ".Repeat"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "subtrigger"),
]
// Reference-typed backing store enabling copy-on-write for this value type.
fileprivate class _StorageClass {
var _subtrigger: Org_Apache_Beam_Model_Pipeline_V1_Trigger? = nil
// Shared storage for default-initialized messages; never mutated.
static let defaultInstance = _StorageClass()
private init() {}
init(copying source: _StorageClass) {
_subtrigger = source._subtrigger
}
}
// Clones the backing store if it is shared, so mutation stays value-semantic.
fileprivate mutating func _uniqueStorage() -> _StorageClass {
if !isKnownUniquelyReferenced(&_storage) {
_storage = _StorageClass(copying: _storage)
}
return _storage
}
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
_ = _uniqueStorage()
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &_storage._subtrigger) }()
default: break
}
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = _storage._subtrigger {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat, rhs: Org_Apache_Beam_Model_Pipeline_V1_Trigger.Repeat) -> Bool {
// Identical storage reference implies equal field values (COW fast path).
if lhs._storage !== rhs._storage {
let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
let _storage = _args.0
let rhs_storage = _args.1
if _storage._subtrigger != rhs_storage._subtrigger {return false}
return true
}
if !storagesAreEqual {return false}
}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for TimestampTransform, whose payload is a oneof
// (`timestamp_transform`) with two cases: delay (field 1) or align_to (field 2).
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".TimestampTransform"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "delay"),
2: .standard(proto: "align_to"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay?
var hadOneofValue = false
// If this oneof case is already set, decode merges into the existing
// submessage; if a *different* case was set, the decoder is told about
// the conflict so last-one-wins semantics are preserved.
if let current = self.timestampTransform {
hadOneofValue = true
if case .delay(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.timestampTransform = .delay(v)
}
}()
case 2: try {
var v: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo?
var hadOneofValue = false
if let current = self.timestampTransform {
hadOneofValue = true
if case .alignTo(let m) = current {v = m}
}
try decoder.decodeSingularMessageField(value: &v)
if let v = v {
if hadOneofValue {try decoder.handleConflictingOneOf()}
self.timestampTransform = .alignTo(v)
}
}()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
switch self.timestampTransform {
case .delay?: try {
guard case .delay(let v)? = self.timestampTransform else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
}()
case .alignTo?: try {
guard case .alignTo(let v)? = self.timestampTransform else { preconditionFailure() }
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
}()
// No oneof case set: nothing to emit.
case nil: break
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform, rhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform) -> Bool {
if lhs.timestampTransform != rhs.timestampTransform {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for TimestampTransform.Delay,
// which carries a single int64 delay in milliseconds.
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.protoMessageName + ".Delay"
// Field number -> proto field name (snake_case on the wire).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "delay_millis"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularInt64Field(value: &self.delayMillis) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: the zero default is not serialized.
if self.delayMillis != 0 {
try visitor.visitSingularInt64Field(value: self.delayMillis, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay, rhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.Delay) -> Bool {
if lhs.delayMillis != rhs.delayMillis {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for TimestampTransform.AlignTo.
// Note: the .proto assigns field numbers 3 and 4 (not 1 and 2) to these
// fields; the generator faithfully reproduces that numbering here.
extension Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.protoMessageName + ".AlignTo"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
3: .same(proto: "period"),
4: .same(proto: "offset"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 3: try { try decoder.decodeSingularInt64Field(value: &self.period) }()
case 4: try { try decoder.decodeSingularInt64Field(value: &self.offset) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: zero defaults are not serialized.
if self.period != 0 {
try visitor.visitSingularInt64Field(value: self.period, fieldNumber: 3)
}
if self.offset != 0 {
try visitor.visitSingularInt64Field(value: self.offset, fieldNumber: 4)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo, rhs: Org_Apache_Beam_Model_Pipeline_V1_TimestampTransform.AlignTo) -> Bool {
if lhs.period != rhs.period {return false}
if lhs.offset != rhs.offset {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for SideInput, which carries
// three optional singular submessages stored in `_`-prefixed backing fields.
extension Org_Apache_Beam_Model_Pipeline_V1_SideInput: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".SideInput"
// Field number -> proto field name (snake_case on the wire).
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "access_pattern"),
2: .standard(proto: "view_fn"),
3: .standard(proto: "window_mapping_fn"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._accessPattern) }()
case 2: try { try decoder.decodeSingularMessageField(value: &self._viewFn) }()
case 3: try { try decoder.decodeSingularMessageField(value: &self._windowMappingFn) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
// Unset (nil) submessages are simply skipped.
try { if let v = self._accessPattern {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
try { if let v = self._viewFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
} }()
try { if let v = self._windowMappingFn {
try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
} }()
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_SideInput, rhs: Org_Apache_Beam_Model_Pipeline_V1_SideInput) -> Bool {
if lhs._accessPattern != rhs._accessPattern {return false}
if lhs._viewFn != rhs._viewFn {return false}
if lhs._windowMappingFn != rhs._windowMappingFn {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime conformance for the field-less StandardArtifacts container message
// (it exists to namespace the Types/Roles enums below).
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardArtifacts"
// No declared fields, so the name map is empty.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Enum-value number -> proto enum-case name for StandardArtifacts.Types,
// used by the text/JSON serializers.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Types: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "FILE"),
1: .same(proto: "URL"),
2: .same(proto: "EMBEDDED"),
3: .same(proto: "PYPI"),
4: .same(proto: "MAVEN"),
5: .same(proto: "DEFERRED"),
]
}
// Enum-value number -> proto enum-case name for StandardArtifacts.Roles,
// used by the text/JSON serializers.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardArtifacts.Roles: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "STAGING_TO"),
1: .same(proto: "PIP_REQUIREMENTS_FILE"),
2: .same(proto: "GO_WORKER_BINARY"),
]
}
// Runtime (decode/encode/equality) conformance for ArtifactFilePayload,
// carrying a file path and its sha256 digest as strings.
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactFilePayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".ArtifactFilePayload"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "path"),
2: .same(proto: "sha256"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.path) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.sha256) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: empty-string defaults are not serialized.
if !self.path.isEmpty {
try visitor.visitSingularStringField(value: self.path, fieldNumber: 1)
}
if !self.sha256.isEmpty {
try visitor.visitSingularStringField(value: self.sha256, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactFilePayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactFilePayload) -> Bool {
if lhs.path != rhs.path {return false}
if lhs.sha256 != rhs.sha256 {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Runtime (decode/encode/equality) conformance for ArtifactUrlPayload,
// carrying a URL and its sha256 digest as strings.
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactUrlPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".ArtifactUrlPayload"
// Field number -> proto field name, used for text/JSON formats.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "url"),
2: .same(proto: "sha256"),
]
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.url) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.sha256) }()
default: break
}
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// Proto3 semantics: empty-string defaults are not serialized.
if !self.url.isEmpty {
try visitor.visitSingularStringField(value: self.url, fieldNumber: 1)
}
if !self.sha256.isEmpty {
try visitor.visitSingularStringField(value: self.sha256, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactUrlPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactUrlPayload) -> Bool {
if lhs.url != rhs.url {return false}
if lhs.sha256 != rhs.sha256 {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `EmbeddedFilePayload`
// (single bytes field: 1 = data).
extension Org_Apache_Beam_Model_Pipeline_V1_EmbeddedFilePayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".EmbeddedFilePayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "data"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularBytesField(value: &self.data) }()
default: break
}
}
}
/// Visits the non-default field, then any retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.data.isEmpty {
try visitor.visitSingularBytesField(value: self.data, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_EmbeddedFilePayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_EmbeddedFilePayload) -> Bool {
if lhs.data != rhs.data {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `PyPIPayload`
// (fields: 1 = artifact_id -> Swift `artifactID`, 2 = version).
extension Org_Apache_Beam_Model_Pipeline_V1_PyPIPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".PyPIPayload"
// Field number -> proto field name; `.standard` marks a snake_case proto name
// that also gets a derived JSON (camelCase) name.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "artifact_id"),
2: .same(proto: "version"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.artifactID) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.version) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.artifactID.isEmpty {
try visitor.visitSingularStringField(value: self.artifactID, fieldNumber: 1)
}
if !self.version.isEmpty {
try visitor.visitSingularStringField(value: self.version, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_PyPIPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_PyPIPayload) -> Bool {
if lhs.artifactID != rhs.artifactID {return false}
if lhs.version != rhs.version {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `MavenPayload`
// (fields: 1 = artifact, 2 = repository_url -> Swift `repositoryURL`).
extension Org_Apache_Beam_Model_Pipeline_V1_MavenPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".MavenPayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "artifact"),
2: .standard(proto: "repository_url"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.artifact) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.repositoryURL) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.artifact.isEmpty {
try visitor.visitSingularStringField(value: self.artifact, fieldNumber: 1)
}
if !self.repositoryURL.isEmpty {
try visitor.visitSingularStringField(value: self.repositoryURL, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_MavenPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_MavenPayload) -> Bool {
if lhs.artifact != rhs.artifact {return false}
if lhs.repositoryURL != rhs.repositoryURL {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `DeferredArtifactPayload`
// (fields: 1 = key (string), 2 = data (bytes)).
extension Org_Apache_Beam_Model_Pipeline_V1_DeferredArtifactPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".DeferredArtifactPayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "key"),
2: .same(proto: "data"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.key) }()
case 2: try { try decoder.decodeSingularBytesField(value: &self.data) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.key.isEmpty {
try visitor.visitSingularStringField(value: self.key, fieldNumber: 1)
}
if !self.data.isEmpty {
try visitor.visitSingularBytesField(value: self.data, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_DeferredArtifactPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_DeferredArtifactPayload) -> Bool {
if lhs.key != rhs.key {return false}
if lhs.data != rhs.data {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `ArtifactStagingToRolePayload`
// (single field: 1 = staged_name -> Swift `stagedName`).
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactStagingToRolePayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".ArtifactStagingToRolePayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "staged_name"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.stagedName) }()
default: break
}
}
}
/// Visits the non-default field, then any retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.stagedName.isEmpty {
try visitor.visitSingularStringField(value: self.stagedName, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactStagingToRolePayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactStagingToRolePayload) -> Bool {
if lhs.stagedName != rhs.stagedName {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `ArtifactInformation`
// (urn/payload pairs for the artifact's type and role).
extension Org_Apache_Beam_Model_Pipeline_V1_ArtifactInformation: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".ArtifactInformation"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "type_urn"),
2: .standard(proto: "type_payload"),
3: .standard(proto: "role_urn"),
4: .standard(proto: "role_payload"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.typeUrn) }()
case 2: try { try decoder.decodeSingularBytesField(value: &self.typePayload) }()
case 3: try { try decoder.decodeSingularStringField(value: &self.roleUrn) }()
case 4: try { try decoder.decodeSingularBytesField(value: &self.rolePayload) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.typeUrn.isEmpty {
try visitor.visitSingularStringField(value: self.typeUrn, fieldNumber: 1)
}
if !self.typePayload.isEmpty {
try visitor.visitSingularBytesField(value: self.typePayload, fieldNumber: 2)
}
if !self.roleUrn.isEmpty {
try visitor.visitSingularStringField(value: self.roleUrn, fieldNumber: 3)
}
if !self.rolePayload.isEmpty {
try visitor.visitSingularBytesField(value: self.rolePayload, fieldNumber: 4)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactInformation, rhs: Org_Apache_Beam_Model_Pipeline_V1_ArtifactInformation) -> Bool {
if lhs.typeUrn != rhs.typeUrn {return false}
if lhs.typePayload != rhs.typePayload {return false}
if lhs.roleUrn != rhs.roleUrn {return false}
if lhs.rolePayload != rhs.rolePayload {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `Environment`.
// Note: field numbering starts at 2 (field 1 is absent in this name map —
// presumably reserved/removed in the .proto; the generator simply omits it).
extension Org_Apache_Beam_Model_Pipeline_V1_Environment: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".Environment"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
2: .same(proto: "urn"),
3: .same(proto: "payload"),
4: .standard(proto: "display_data"),
5: .same(proto: "capabilities"),
6: .same(proto: "dependencies"),
7: .standard(proto: "resource_hints"),
]
/// Decodes the known fields from `decoder`. Field 7 is a
/// `map<string, bytes>` decoded via `_ProtobufMap`; unrecognized field
/// numbers are left to the decoder (they surface via `unknownFields`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 2: try { try decoder.decodeSingularStringField(value: &self.urn) }()
case 3: try { try decoder.decodeSingularBytesField(value: &self.payload) }()
case 4: try { try decoder.decodeRepeatedMessageField(value: &self.displayData) }()
case 5: try { try decoder.decodeRepeatedStringField(value: &self.capabilities) }()
case 6: try { try decoder.decodeRepeatedMessageField(value: &self.dependencies) }()
case 7: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufBytes>.self, value: &self.resourceHints) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.urn.isEmpty {
try visitor.visitSingularStringField(value: self.urn, fieldNumber: 2)
}
if !self.payload.isEmpty {
try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 3)
}
if !self.displayData.isEmpty {
try visitor.visitRepeatedMessageField(value: self.displayData, fieldNumber: 4)
}
if !self.capabilities.isEmpty {
try visitor.visitRepeatedStringField(value: self.capabilities, fieldNumber: 5)
}
if !self.dependencies.isEmpty {
try visitor.visitRepeatedMessageField(value: self.dependencies, fieldNumber: 6)
}
if !self.resourceHints.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufBytes>.self, value: self.resourceHints, fieldNumber: 7)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_Environment, rhs: Org_Apache_Beam_Model_Pipeline_V1_Environment) -> Bool {
if lhs.urn != rhs.urn {return false}
if lhs.payload != rhs.payload {return false}
if lhs.displayData != rhs.displayData {return false}
if lhs.capabilities != rhs.capabilities {return false}
if lhs.dependencies != rhs.dependencies {return false}
if lhs.resourceHints != rhs.resourceHints {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// `StandardEnvironments` declares no fields of its own — it exists as a
// namespace for the nested `Environments` enum. Decoding drains all field
// numbers (retained as unknown fields); traversal emits only unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardEnvironments"
// Empty map: no named fields.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
/// Equality reduces to comparing unknown fields (no declared fields).
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Proto enum-case names for `StandardEnvironments.Environments`, used by
// SwiftProtobuf's text-format/JSON coding.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardEnvironments.Environments: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "DOCKER"),
1: .same(proto: "PROCESS"),
2: .same(proto: "EXTERNAL"),
3: .same(proto: "DEFAULT"),
]
}
// Generated runtime (de)serialization support for `DockerPayload`
// (single field: 1 = container_image -> Swift `containerImage`).
extension Org_Apache_Beam_Model_Pipeline_V1_DockerPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".DockerPayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .standard(proto: "container_image"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.containerImage) }()
default: break
}
}
}
/// Visits the non-default field, then any retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.containerImage.isEmpty {
try visitor.visitSingularStringField(value: self.containerImage, fieldNumber: 1)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_DockerPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_DockerPayload) -> Bool {
if lhs.containerImage != rhs.containerImage {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `ProcessPayload`
// (fields: 1 = os, 2 = arch, 3 = command, 4 = env map<string,string>).
extension Org_Apache_Beam_Model_Pipeline_V1_ProcessPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".ProcessPayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "os"),
2: .same(proto: "arch"),
3: .same(proto: "command"),
4: .same(proto: "env"),
]
/// Decodes the known fields from `decoder`. Field 4 is a
/// `map<string, string>`; unrecognized field numbers are left to the decoder
/// (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.os) }()
case 2: try { try decoder.decodeSingularStringField(value: &self.arch) }()
case 3: try { try decoder.decodeSingularStringField(value: &self.command) }()
case 4: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: &self.env) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.os.isEmpty {
try visitor.visitSingularStringField(value: self.os, fieldNumber: 1)
}
if !self.arch.isEmpty {
try visitor.visitSingularStringField(value: self.arch, fieldNumber: 2)
}
if !self.command.isEmpty {
try visitor.visitSingularStringField(value: self.command, fieldNumber: 3)
}
if !self.env.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: self.env, fieldNumber: 4)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ProcessPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ProcessPayload) -> Bool {
if lhs.os != rhs.os {return false}
if lhs.arch != rhs.arch {return false}
if lhs.command != rhs.command {return false}
if lhs.env != rhs.env {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `ExternalPayload`
// (fields: 1 = endpoint (message, stored as optional `_endpoint`),
// 2 = params map<string,string>).
extension Org_Apache_Beam_Model_Pipeline_V1_ExternalPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".ExternalPayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "endpoint"),
2: .same(proto: "params"),
]
/// Decodes the known fields from `decoder`. The singular message field
/// decodes directly into the optional backing storage `_endpoint`.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularMessageField(value: &self._endpoint) }()
case 2: try { try decoder.decodeMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: &self.params) }()
default: break
}
}
}
/// Visits each present/non-default field in ascending field-number order,
/// then any retained unknown fields. `_endpoint` is emitted only when set.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
try { if let v = self._endpoint {
try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
} }()
if !self.params.isEmpty {
try visitor.visitMapField(fieldType: SwiftProtobuf._ProtobufMap<SwiftProtobuf.ProtobufString,SwiftProtobuf.ProtobufString>.self, value: self.params, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality. Compares `_endpoint` (optional backing storage)
/// so presence/absence of the message field is part of equality.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExternalPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExternalPayload) -> Bool {
if lhs._endpoint != rhs._endpoint {return false}
if lhs.params != rhs.params {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// `StandardProtocols` declares no fields — it is a namespace for the nested
// `Enum` of protocol URNs. Decoding drains all field numbers; traversal
// emits only unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardProtocols"
// Empty map: no named fields.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
/// Equality reduces to comparing unknown fields (no declared fields).
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Proto enum-case names for `StandardProtocols.Enum`, used by SwiftProtobuf's
// text-format/JSON coding.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardProtocols.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "LEGACY_PROGRESS_REPORTING"),
1: .same(proto: "PROGRESS_REPORTING"),
2: .same(proto: "WORKER_STATUS"),
3: .same(proto: "MULTI_CORE_BUNDLE_PROCESSING"),
4: .same(proto: "HARNESS_MONITORING_INFOS"),
5: .same(proto: "SIBLING_WORKERS"),
6: .same(proto: "CONTROL_REQUEST_ELEMENTS_EMBEDDING"),
7: .same(proto: "STATE_CACHING"),
8: .same(proto: "DATA_SAMPLING"),
]
}
// `StandardRunnerProtocols` declares no fields — it is a namespace for the
// nested `Enum`. Decoding drains all field numbers; traversal emits only
// unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardRunnerProtocols"
// Empty map: no named fields.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
/// Equality reduces to comparing unknown fields (no declared fields).
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Proto enum-case names for `StandardRunnerProtocols.Enum`. Raw values are
// sparse (0 and 6) — they mirror the .proto's assigned values exactly.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRunnerProtocols.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "MONITORING_INFO_SHORT_IDS"),
6: .same(proto: "CONTROL_RESPONSE_ELEMENTS_EMBEDDING"),
]
}
// `StandardRequirements` declares no fields — it is a namespace for the
// nested `Enum`. Decoding drains all field numbers; traversal emits only
// unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardRequirements"
// Empty map: no named fields.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
/// Equality reduces to comparing unknown fields (no declared fields).
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Proto enum-case names for `StandardRequirements.Enum`, used by
// SwiftProtobuf's text-format/JSON coding.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardRequirements.Enum: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "REQUIRES_STATEFUL_PROCESSING"),
1: .same(proto: "REQUIRES_BUNDLE_FINALIZATION"),
2: .same(proto: "REQUIRES_STABLE_INPUT"),
3: .same(proto: "REQUIRES_TIME_SORTED_INPUT"),
4: .same(proto: "REQUIRES_SPLITTABLE_DOFN"),
5: .same(proto: "REQUIRES_ON_WINDOW_EXPIRATION"),
]
}
// Generated runtime (de)serialization support for `FunctionSpec`.
// Note: field numbers are 1 and 3 (2 is absent from the name map —
// presumably reserved/removed in the .proto).
extension Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".FunctionSpec"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "urn"),
3: .same(proto: "payload"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.urn) }()
case 3: try { try decoder.decodeSingularBytesField(value: &self.payload) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.urn.isEmpty {
try visitor.visitSingularStringField(value: self.urn, fieldNumber: 1)
}
if !self.payload.isEmpty {
try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 3)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec, rhs: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec) -> Bool {
if lhs.urn != rhs.urn {return false}
if lhs.payload != rhs.payload {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// `StandardDisplayData` declares no fields — it is a namespace for the
// nested `DisplayData` enum. Decoding drains all field numbers; traversal
// emits only unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
static let protoMessageName: String = _protobuf_package + ".StandardDisplayData"
// Empty map: no named fields.
static let _protobuf_nameMap = SwiftProtobuf._NameMap()
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let _ = try decoder.nextFieldNumber() {
}
}
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
try unknownFields.traverse(visitor: &visitor)
}
/// Equality reduces to comparing unknown fields (no declared fields).
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData) -> Bool {
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Proto enum-case name for `StandardDisplayData.DisplayData` (single case).
extension Org_Apache_Beam_Model_Pipeline_V1_StandardDisplayData.DisplayData: SwiftProtobuf._ProtoNameProviding {
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
0: .same(proto: "LABELLED"),
]
}
// Generated runtime (de)serialization support for `LabelledPayload`.
// Fields 2-5 (string_value/bool_value/double_value/int_value) form the
// `value` oneof: at most one may be set at a time.
extension Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".LabelledPayload"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "label"),
2: .standard(proto: "string_value"),
3: .standard(proto: "bool_value"),
4: .standard(proto: "double_value"),
5: .standard(proto: "int_value"),
6: .same(proto: "key"),
7: .same(proto: "namespace"),
]
/// Decodes the known fields from `decoder`. Each oneof member decodes into a
/// local optional first; if `value` was already populated when a second
/// oneof field arrives, `handleConflictingOneOf()` is invoked before the new
/// case replaces it.
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.label) }()
case 2: try {
var v: String?
try decoder.decodeSingularStringField(value: &v)
if let v = v {
if self.value != nil {try decoder.handleConflictingOneOf()}
self.value = .stringValue(v)
}
}()
case 3: try {
var v: Bool?
try decoder.decodeSingularBoolField(value: &v)
if let v = v {
if self.value != nil {try decoder.handleConflictingOneOf()}
self.value = .boolValue(v)
}
}()
case 4: try {
var v: Double?
try decoder.decodeSingularDoubleField(value: &v)
if let v = v {
if self.value != nil {try decoder.handleConflictingOneOf()}
self.value = .doubleValue(v)
}
}()
case 5: try {
var v: Int64?
try decoder.decodeSingularInt64Field(value: &v)
if let v = v {
if self.value != nil {try decoder.handleConflictingOneOf()}
self.value = .intValue(v)
}
}()
case 6: try { try decoder.decodeSingularStringField(value: &self.key) }()
case 7: try { try decoder.decodeSingularStringField(value: &self.namespace) }()
default: break
}
}
}
/// Visits each present/non-default field in ascending field-number order
/// (label, then whichever `value` oneof case is set, then key/namespace),
/// followed by any retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every if/case branch local when no optimizations
// are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
// https://github.com/apple/swift-protobuf/issues/1182
if !self.label.isEmpty {
try visitor.visitSingularStringField(value: self.label, fieldNumber: 1)
}
// Switch on the case only, then re-extract the payload inside the closure
// (the two-step guard keeps the payload binding out of the switch itself;
// `preconditionFailure()` is unreachable because the case just matched).
switch self.value {
case .stringValue?: try {
guard case .stringValue(let v)? = self.value else { preconditionFailure() }
try visitor.visitSingularStringField(value: v, fieldNumber: 2)
}()
case .boolValue?: try {
guard case .boolValue(let v)? = self.value else { preconditionFailure() }
try visitor.visitSingularBoolField(value: v, fieldNumber: 3)
}()
case .doubleValue?: try {
guard case .doubleValue(let v)? = self.value else { preconditionFailure() }
try visitor.visitSingularDoubleField(value: v, fieldNumber: 4)
}()
case .intValue?: try {
guard case .intValue(let v)? = self.value else { preconditionFailure() }
try visitor.visitSingularInt64Field(value: v, fieldNumber: 5)
}()
case nil: break
}
if !self.key.isEmpty {
try visitor.visitSingularStringField(value: self.key, fieldNumber: 6)
}
if !self.namespace.isEmpty {
try visitor.visitSingularStringField(value: self.namespace, fieldNumber: 7)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality; the oneof `value` compares as a single unit.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_LabelledPayload) -> Bool {
if lhs.label != rhs.label {return false}
if lhs.value != rhs.value {return false}
if lhs.key != rhs.key {return false}
if lhs.namespace != rhs.namespace {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
// Generated runtime (de)serialization support for `DisplayData`
// (fields: 1 = urn (string), 2 = payload (bytes)).
extension Org_Apache_Beam_Model_Pipeline_V1_DisplayData: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
// Fully-qualified proto message name.
static let protoMessageName: String = _protobuf_package + ".DisplayData"
// Field number -> proto field name, for text-format/JSON coding.
static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
1: .same(proto: "urn"),
2: .same(proto: "payload"),
]
/// Decodes the known fields from `decoder`; unrecognized field numbers are
/// left to the decoder (they surface via `unknownFields` in `traverse`).
mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
while let fieldNumber = try decoder.nextFieldNumber() {
// The use of inline closures is to circumvent an issue where the compiler
// allocates stack space for every case branch when no optimizations are
// enabled. https://github.com/apple/swift-protobuf/issues/1034
switch fieldNumber {
case 1: try { try decoder.decodeSingularStringField(value: &self.urn) }()
case 2: try { try decoder.decodeSingularBytesField(value: &self.payload) }()
default: break
}
}
}
/// Visits each non-default field in ascending field-number order, then any
/// retained unknown fields.
func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
if !self.urn.isEmpty {
try visitor.visitSingularStringField(value: self.urn, fieldNumber: 1)
}
if !self.payload.isEmpty {
try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 2)
}
try unknownFields.traverse(visitor: &visitor)
}
/// Field-by-field equality, including `unknownFields`.
static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_DisplayData, rhs: Org_Apache_Beam_Model_Pipeline_V1_DisplayData) -> Bool {
if lhs.urn != rhs.urn {return false}
if lhs.payload != rhs.payload {return false}
if lhs.unknownFields != rhs.unknownFields {return false}
return true
}
}
/// `SwiftProtobuf.Message` conformance for `MessageWithComponents`: a
/// `components` sub-message (field 1) plus a `root` oneof that can hold any
/// one of ten pipeline message kinds (fields 2-4, 6-9, 11-13).
extension Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".MessageWithComponents"
  // Maps wire field numbers to their .proto field names (for JSON/TextFormat).
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .same(proto: "components"),
    2: .same(proto: "coder"),
    3: .standard(proto: "combine_payload"),
    4: .standard(proto: "function_spec"),
    6: .standard(proto: "par_do_payload"),
    7: .same(proto: "ptransform"),
    8: .same(proto: "pcollection"),
    9: .standard(proto: "read_payload"),
    11: .standard(proto: "side_input"),
    12: .standard(proto: "window_into_payload"),
    13: .standard(proto: "windowing_strategy"),
  ]

  // Heap-allocated backing store; the struct holds a reference to this class
  // and copies it on write (see `_uniqueStorage`) to keep value semantics
  // while avoiding large inline storage.
  fileprivate class _StorageClass {
    var _components: Org_Apache_Beam_Model_Pipeline_V1_Components? = nil
    // The `root` oneof; at most one case is set at a time.
    var _root: Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents.OneOf_Root?

    static let defaultInstance = _StorageClass()

    private init() {}

    init(copying source: _StorageClass) {
      _components = source._components
      _root = source._root
    }
  }

  // Copy-on-write: clone the storage if it is shared before mutating.
  fileprivate mutating func _uniqueStorage() -> _StorageClass {
    if !isKnownUniquelyReferenced(&_storage) {
      _storage = _StorageClass(copying: _storage)
    }
    return _storage
  }

  /// Decodes fields into (uniquely owned) storage. Each oneof case first
  /// retrieves the current payload of the same kind (so a repeated occurrence
  /// of the same field merges per proto semantics), decodes into it, and on
  /// success reports a conflict if a *different* oneof case was already set.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    _ = _uniqueStorage()
    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
      while let fieldNumber = try decoder.nextFieldNumber() {
        // The use of inline closures is to circumvent an issue where the compiler
        // allocates stack space for every case branch when no optimizations are
        // enabled. https://github.com/apple/swift-protobuf/issues/1034
        switch fieldNumber {
        case 1: try { try decoder.decodeSingularMessageField(value: &_storage._components) }()
        case 2: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_Coder?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            // Merge into the existing value only if it is the same oneof case.
            if case .coder(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .coder(v)
          }
        }()
        case 3: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_CombinePayload?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .combinePayload(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .combinePayload(v)
          }
        }()
        case 4: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_FunctionSpec?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .functionSpec(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .functionSpec(v)
          }
        }()
        case 6: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_ParDoPayload?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .parDoPayload(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .parDoPayload(v)
          }
        }()
        case 7: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_PTransform?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .ptransform(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .ptransform(v)
          }
        }()
        case 8: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_PCollection?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .pcollection(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .pcollection(v)
          }
        }()
        case 9: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_ReadPayload?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .readPayload(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .readPayload(v)
          }
        }()
        case 11: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_SideInput?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .sideInput(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .sideInput(v)
          }
        }()
        case 12: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_WindowIntoPayload?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .windowIntoPayload(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .windowIntoPayload(v)
          }
        }()
        case 13: try {
          var v: Org_Apache_Beam_Model_Pipeline_V1_WindowingStrategy?
          var hadOneofValue = false
          if let current = _storage._root {
            hadOneofValue = true
            if case .windowingStrategy(let m) = current {v = m}
          }
          try decoder.decodeSingularMessageField(value: &v)
          if let v = v {
            if hadOneofValue {try decoder.handleConflictingOneOf()}
            _storage._root = .windowingStrategy(v)
          }
        }()
        default: break
        }
      }
    }
  }

  /// Serializes `components` (field 1) if set, then exactly one `root` oneof
  /// case (fields 2-13), then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every if/case branch local when no optimizations
      // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
      // https://github.com/apple/swift-protobuf/issues/1182
      try { if let v = _storage._components {
        try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
      } }()
      switch _storage._root {
      case .coder?: try {
        guard case .coder(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 2)
      }()
      case .combinePayload?: try {
        guard case .combinePayload(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 3)
      }()
      case .functionSpec?: try {
        guard case .functionSpec(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
      }()
      case .parDoPayload?: try {
        guard case .parDoPayload(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
      }()
      case .ptransform?: try {
        guard case .ptransform(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 7)
      }()
      case .pcollection?: try {
        guard case .pcollection(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 8)
      }()
      case .readPayload?: try {
        guard case .readPayload(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 9)
      }()
      case .sideInput?: try {
        guard case .sideInput(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 11)
      }()
      case .windowIntoPayload?: try {
        guard case .windowIntoPayload(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 12)
      }()
      case .windowingStrategy?: try {
        guard case .windowingStrategy(let v)? = _storage._root else { preconditionFailure() }
        try visitor.visitSingularMessageField(value: v, fieldNumber: 13)
      }()
      case nil: break
      }
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Equality compares the backing storage field-by-field; the identity check
  /// short-circuits the common case of shared (copy-on-write) storage.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents, rhs: Org_Apache_Beam_Model_Pipeline_V1_MessageWithComponents) -> Bool {
    if lhs._storage !== rhs._storage {
      let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
        let _storage = _args.0
        let rhs_storage = _args.1
        if _storage._components != rhs_storage._components {return false}
        if _storage._root != rhs_storage._root {return false}
        return true
      }
      if !storagesAreEqual {return false}
    }
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `SwiftProtobuf.Message` conformance for `ExecutableStagePayload`: the
/// environment, input/output PCollection ids, transforms, side inputs, user
/// state, timers and components that make up a fused executable stage.
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".ExecutableStagePayload"
  // Maps wire field numbers to .proto names. Entries follow the generator's
  // declaration order (note field 9 listed after 1), which has no effect on
  // lookup behavior.
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .same(proto: "environment"),
    9: .standard(proto: "wire_coder_settings"),
    2: .same(proto: "input"),
    3: .standard(proto: "side_inputs"),
    4: .same(proto: "transforms"),
    5: .same(proto: "outputs"),
    6: .same(proto: "components"),
    7: .standard(proto: "user_states"),
    8: .same(proto: "timers"),
    10: .same(proto: "timerFamilies"),
  ]

  // Heap-allocated backing store for copy-on-write value semantics.
  fileprivate class _StorageClass {
    var _environment: Org_Apache_Beam_Model_Pipeline_V1_Environment? = nil
    var _wireCoderSettings: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting] = []
    var _input: String = String()
    var _sideInputs: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.SideInputId] = []
    var _transforms: [String] = []
    var _outputs: [String] = []
    var _components: Org_Apache_Beam_Model_Pipeline_V1_Components? = nil
    var _userStates: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.UserStateId] = []
    var _timers: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId] = []
    var _timerFamilies: [Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerFamilyId] = []

    static let defaultInstance = _StorageClass()

    private init() {}

    init(copying source: _StorageClass) {
      _environment = source._environment
      _wireCoderSettings = source._wireCoderSettings
      _input = source._input
      _sideInputs = source._sideInputs
      _transforms = source._transforms
      _outputs = source._outputs
      _components = source._components
      _userStates = source._userStates
      _timers = source._timers
      _timerFamilies = source._timerFamilies
    }
  }

  // Copy-on-write: clone the storage if it is shared before mutating.
  fileprivate mutating func _uniqueStorage() -> _StorageClass {
    if !isKnownUniquelyReferenced(&_storage) {
      _storage = _StorageClass(copying: _storage)
    }
    return _storage
  }

  /// Decodes all known fields into uniquely-owned storage; unrecognized
  /// field numbers are preserved by the decoder in `unknownFields`.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    _ = _uniqueStorage()
    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
      while let fieldNumber = try decoder.nextFieldNumber() {
        // The use of inline closures is to circumvent an issue where the compiler
        // allocates stack space for every case branch when no optimizations are
        // enabled. https://github.com/apple/swift-protobuf/issues/1034
        switch fieldNumber {
        case 1: try { try decoder.decodeSingularMessageField(value: &_storage._environment) }()
        case 2: try { try decoder.decodeSingularStringField(value: &_storage._input) }()
        case 3: try { try decoder.decodeRepeatedMessageField(value: &_storage._sideInputs) }()
        case 4: try { try decoder.decodeRepeatedStringField(value: &_storage._transforms) }()
        case 5: try { try decoder.decodeRepeatedStringField(value: &_storage._outputs) }()
        case 6: try { try decoder.decodeSingularMessageField(value: &_storage._components) }()
        case 7: try { try decoder.decodeRepeatedMessageField(value: &_storage._userStates) }()
        case 8: try { try decoder.decodeRepeatedMessageField(value: &_storage._timers) }()
        case 9: try { try decoder.decodeRepeatedMessageField(value: &_storage._wireCoderSettings) }()
        case 10: try { try decoder.decodeRepeatedMessageField(value: &_storage._timerFamilies) }()
        default: break
        }
      }
    }
  }

  /// Serializes set fields in ascending field-number order, then unknown
  /// fields. Default-valued (nil/empty) fields are omitted per proto3 rules.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try withExtendedLifetime(_storage) { (_storage: _StorageClass) in
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every if/case branch local when no optimizations
      // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
      // https://github.com/apple/swift-protobuf/issues/1182
      try { if let v = _storage._environment {
        try visitor.visitSingularMessageField(value: v, fieldNumber: 1)
      } }()
      if !_storage._input.isEmpty {
        try visitor.visitSingularStringField(value: _storage._input, fieldNumber: 2)
      }
      if !_storage._sideInputs.isEmpty {
        try visitor.visitRepeatedMessageField(value: _storage._sideInputs, fieldNumber: 3)
      }
      if !_storage._transforms.isEmpty {
        try visitor.visitRepeatedStringField(value: _storage._transforms, fieldNumber: 4)
      }
      if !_storage._outputs.isEmpty {
        try visitor.visitRepeatedStringField(value: _storage._outputs, fieldNumber: 5)
      }
      try { if let v = _storage._components {
        try visitor.visitSingularMessageField(value: v, fieldNumber: 6)
      } }()
      if !_storage._userStates.isEmpty {
        try visitor.visitRepeatedMessageField(value: _storage._userStates, fieldNumber: 7)
      }
      if !_storage._timers.isEmpty {
        try visitor.visitRepeatedMessageField(value: _storage._timers, fieldNumber: 8)
      }
      if !_storage._wireCoderSettings.isEmpty {
        try visitor.visitRepeatedMessageField(value: _storage._wireCoderSettings, fieldNumber: 9)
      }
      if !_storage._timerFamilies.isEmpty {
        try visitor.visitRepeatedMessageField(value: _storage._timerFamilies, fieldNumber: 10)
      }
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Equality compares the backing storage field-by-field; the identity check
  /// short-circuits the common case of shared (copy-on-write) storage.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload) -> Bool {
    if lhs._storage !== rhs._storage {
      let storagesAreEqual: Bool = withExtendedLifetime((lhs._storage, rhs._storage)) { (_args: (_StorageClass, _StorageClass)) in
        let _storage = _args.0
        let rhs_storage = _args.1
        if _storage._environment != rhs_storage._environment {return false}
        if _storage._wireCoderSettings != rhs_storage._wireCoderSettings {return false}
        if _storage._input != rhs_storage._input {return false}
        if _storage._sideInputs != rhs_storage._sideInputs {return false}
        if _storage._transforms != rhs_storage._transforms {return false}
        if _storage._outputs != rhs_storage._outputs {return false}
        if _storage._components != rhs_storage._components {return false}
        if _storage._userStates != rhs_storage._userStates {return false}
        if _storage._timers != rhs_storage._timers {return false}
        if _storage._timerFamilies != rhs_storage._timerFamilies {return false}
        return true
      }
      if !storagesAreEqual {return false}
    }
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `SwiftProtobuf.Message` conformance for `SideInputId`: a (transform_id,
/// local_name) pair identifying a side input within an executable stage.
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.SideInputId: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.protoMessageName + ".SideInputId"
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .standard(proto: "transform_id"),
    2: .standard(proto: "local_name"),
  ]

  /// Decodes fields 1 (`transform_id`) and 2 (`local_name`) from the wire.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch fieldNumber {
      case 1: try { try decoder.decodeSingularStringField(value: &self.transformID) }()
      case 2: try { try decoder.decodeSingularStringField(value: &self.localName) }()
      default: break
      }
    }
  }

  /// Serializes non-empty fields in field-number order, then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if !self.transformID.isEmpty {
      try visitor.visitSingularStringField(value: self.transformID, fieldNumber: 1)
    }
    if !self.localName.isEmpty {
      try visitor.visitSingularStringField(value: self.localName, fieldNumber: 2)
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.SideInputId, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.SideInputId) -> Bool {
    if lhs.transformID != rhs.transformID {return false}
    if lhs.localName != rhs.localName {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `SwiftProtobuf.Message` conformance for `UserStateId`: a (transform_id,
/// local_name) pair identifying a user-state cell within an executable stage.
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.UserStateId: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.protoMessageName + ".UserStateId"
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .standard(proto: "transform_id"),
    2: .standard(proto: "local_name"),
  ]

  /// Decodes fields 1 (`transform_id`) and 2 (`local_name`) from the wire.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch fieldNumber {
      case 1: try { try decoder.decodeSingularStringField(value: &self.transformID) }()
      case 2: try { try decoder.decodeSingularStringField(value: &self.localName) }()
      default: break
      }
    }
  }

  /// Serializes non-empty fields in field-number order, then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if !self.transformID.isEmpty {
      try visitor.visitSingularStringField(value: self.transformID, fieldNumber: 1)
    }
    if !self.localName.isEmpty {
      try visitor.visitSingularStringField(value: self.localName, fieldNumber: 2)
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.UserStateId, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.UserStateId) -> Bool {
    if lhs.transformID != rhs.transformID {return false}
    if lhs.localName != rhs.localName {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `SwiftProtobuf.Message` conformance for `TimerId`: a (transform_id,
/// local_name) pair identifying a timer within an executable stage.
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.protoMessageName + ".TimerId"
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .standard(proto: "transform_id"),
    2: .standard(proto: "local_name"),
  ]

  /// Decodes fields 1 (`transform_id`) and 2 (`local_name`) from the wire.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch fieldNumber {
      case 1: try { try decoder.decodeSingularStringField(value: &self.transformID) }()
      case 2: try { try decoder.decodeSingularStringField(value: &self.localName) }()
      default: break
      }
    }
  }

  /// Serializes non-empty fields in field-number order, then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if !self.transformID.isEmpty {
      try visitor.visitSingularStringField(value: self.transformID, fieldNumber: 1)
    }
    if !self.localName.isEmpty {
      try visitor.visitSingularStringField(value: self.localName, fieldNumber: 2)
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId) -> Bool {
    if lhs.transformID != rhs.transformID {return false}
    if lhs.localName != rhs.localName {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `SwiftProtobuf.Message` conformance for `TimerFamilyId`: a (transform_id,
/// local_name) pair identifying a timer family within an executable stage.
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerFamilyId: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.protoMessageName + ".TimerFamilyId"
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .standard(proto: "transform_id"),
    2: .standard(proto: "local_name"),
  ]

  /// Decodes fields 1 (`transform_id`) and 2 (`local_name`) from the wire.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch fieldNumber {
      case 1: try { try decoder.decodeSingularStringField(value: &self.transformID) }()
      case 2: try { try decoder.decodeSingularStringField(value: &self.localName) }()
      default: break
      }
    }
  }

  /// Serializes non-empty fields in field-number order, then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    if !self.transformID.isEmpty {
      try visitor.visitSingularStringField(value: self.transformID, fieldNumber: 1)
    }
    if !self.localName.isEmpty {
      try visitor.visitSingularStringField(value: self.localName, fieldNumber: 2)
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Field-by-field equality, including unknown fields.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerFamilyId, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerFamilyId) -> Bool {
    if lhs.transformID != rhs.transformID {return false}
    if lhs.localName != rhs.localName {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `SwiftProtobuf.Message` conformance for `WireCoderSetting`: a coder `urn`
/// and `payload` plus a `target` oneof selecting either a PCollection id
/// (field 3, string) or a timer (field 4, message).
extension Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.protoMessageName + ".WireCoderSetting"
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    1: .same(proto: "urn"),
    2: .same(proto: "payload"),
    3: .standard(proto: "input_or_output_id"),
    4: .same(proto: "timer"),
  ]

  /// Decodes scalar fields 1-2 and the `target` oneof (fields 3-4); a later
  /// occurrence of the other oneof case triggers `handleConflictingOneOf`.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while let fieldNumber = try decoder.nextFieldNumber() {
      // The use of inline closures is to circumvent an issue where the compiler
      // allocates stack space for every case branch when no optimizations are
      // enabled. https://github.com/apple/swift-protobuf/issues/1034
      switch fieldNumber {
      case 1: try { try decoder.decodeSingularStringField(value: &self.urn) }()
      case 2: try { try decoder.decodeSingularBytesField(value: &self.payload) }()
      case 3: try {
        var v: String?
        try decoder.decodeSingularStringField(value: &v)
        if let v = v {
          if self.target != nil {try decoder.handleConflictingOneOf()}
          self.target = .inputOrOutputID(v)
        }
      }()
      case 4: try {
        var v: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.TimerId?
        var hadOneofValue = false
        if let current = self.target {
          hadOneofValue = true
          // Merge into the existing timer only if that oneof case is active.
          if case .timer(let m) = current {v = m}
        }
        try decoder.decodeSingularMessageField(value: &v)
        if let v = v {
          if hadOneofValue {try decoder.handleConflictingOneOf()}
          self.target = .timer(v)
        }
      }()
      default: break
      }
    }
  }

  /// Serializes non-empty scalars, then the active `target` oneof case,
  /// then unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    // The use of inline closures is to circumvent an issue where the compiler
    // allocates stack space for every if/case branch local when no optimizations
    // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and
    // https://github.com/apple/swift-protobuf/issues/1182
    if !self.urn.isEmpty {
      try visitor.visitSingularStringField(value: self.urn, fieldNumber: 1)
    }
    if !self.payload.isEmpty {
      try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 2)
    }
    switch self.target {
    case .inputOrOutputID?: try {
      guard case .inputOrOutputID(let v)? = self.target else { preconditionFailure() }
      try visitor.visitSingularStringField(value: v, fieldNumber: 3)
    }()
    case .timer?: try {
      guard case .timer(let v)? = self.target else { preconditionFailure() }
      try visitor.visitSingularMessageField(value: v, fieldNumber: 4)
    }()
    case nil: break
    }
    try unknownFields.traverse(visitor: &visitor)
  }

  /// Field-by-field equality, including the oneof and unknown fields.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting, rhs: Org_Apache_Beam_Model_Pipeline_V1_ExecutableStagePayload.WireCoderSetting) -> Bool {
    if lhs.urn != rhs.urn {return false}
    if lhs.payload != rhs.payload {return false}
    if lhs.target != rhs.target {return false}
    if lhs.unknownFields != rhs.unknownFields {return false}
    return true
  }
}
/// `StandardResourceHints` declares no fields of its own; it exists as a
/// namespace for the nested `Enum` of hint URNs. Decoding just drains the
/// field stream (everything lands in `unknownFields`), and serialization
/// emits only those unknown fields.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding {
  static let protoMessageName: String = _protobuf_package + ".StandardResourceHints"
  static let _protobuf_nameMap = SwiftProtobuf._NameMap()

  /// Consumes every incoming field number; with no known fields, the decoder
  /// retains all of them as unknown fields.
  mutating func decodeMessage<D: SwiftProtobuf.Decoder>(decoder: inout D) throws {
    while try decoder.nextFieldNumber() != nil {}
  }

  /// Serializes only the retained unknown fields.
  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
    try unknownFields.traverse(visitor: &visitor)
  }

  /// With no declared fields, equality reduces to comparing unknown fields.
  static func ==(lhs: Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints, rhs: Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints) -> Bool {
    return lhs.unknownFields == rhs.unknownFields
  }
}
/// Proto-name mapping for `StandardResourceHints.Enum`, used for JSON and
/// TextFormat (de)serialization of the enum's raw values.
extension Org_Apache_Beam_Model_Pipeline_V1_StandardResourceHints.Enum: SwiftProtobuf._ProtoNameProviding {
  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
    0: .same(proto: "ACCELERATOR"),
    1: .same(proto: "MIN_RAM_BYTES"),
  ]
}