| // Generated by the protocol buffer compiler. DO NOT EDIT! |
| // source: src/protobuf/LlapDaemonProtocol.proto |
| |
| package org.apache.hadoop.hive.llap.daemon.rpc; |
| |
| public final class LlapDaemonProtocolProtos { |
| private LlapDaemonProtocolProtos() {} |
  /**
   * Registers all protobuf extensions declared by this proto file.
   * Intentionally empty: LlapDaemonProtocol.proto declares no extensions,
   * but protoc always emits this hook for API uniformity.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
| /** |
| * Protobuf enum {@code SourceStateProto} |
| */ |
| public enum SourceStateProto |
| implements com.google.protobuf.ProtocolMessageEnum { |
| /** |
| * <code>S_SUCCEEDED = 1;</code> |
| */ |
| S_SUCCEEDED(0, 1), |
| /** |
| * <code>S_RUNNING = 2;</code> |
| */ |
| S_RUNNING(1, 2), |
| ; |
| |
| /** |
| * <code>S_SUCCEEDED = 1;</code> |
| */ |
| public static final int S_SUCCEEDED_VALUE = 1; |
| /** |
| * <code>S_RUNNING = 2;</code> |
| */ |
| public static final int S_RUNNING_VALUE = 2; |
| |
| |
| public final int getNumber() { return value; } |
| |
| public static SourceStateProto valueOf(int value) { |
| switch (value) { |
| case 1: return S_SUCCEEDED; |
| case 2: return S_RUNNING; |
| default: return null; |
| } |
| } |
| |
| public static com.google.protobuf.Internal.EnumLiteMap<SourceStateProto> |
| internalGetValueMap() { |
| return internalValueMap; |
| } |
| private static com.google.protobuf.Internal.EnumLiteMap<SourceStateProto> |
| internalValueMap = |
| new com.google.protobuf.Internal.EnumLiteMap<SourceStateProto>() { |
| public SourceStateProto findValueByNumber(int number) { |
| return SourceStateProto.valueOf(number); |
| } |
| }; |
| |
| public final com.google.protobuf.Descriptors.EnumValueDescriptor |
| getValueDescriptor() { |
| return getDescriptor().getValues().get(index); |
| } |
| public final com.google.protobuf.Descriptors.EnumDescriptor |
| getDescriptorForType() { |
| return getDescriptor(); |
| } |
| public static final com.google.protobuf.Descriptors.EnumDescriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.getDescriptor().getEnumTypes().get(0); |
| } |
| |
| private static final SourceStateProto[] VALUES = values(); |
| |
| public static SourceStateProto valueOf( |
| com.google.protobuf.Descriptors.EnumValueDescriptor desc) { |
| if (desc.getType() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "EnumValueDescriptor is not for this type."); |
| } |
| return VALUES[desc.getIndex()]; |
| } |
| |
| private final int index; |
| private final int value; |
| |
| private SourceStateProto(int index, int value) { |
| this.index = index; |
| this.value = value; |
| } |
| |
| // @@protoc_insertion_point(enum_scope:SourceStateProto) |
| } |
| |
| /** |
| * Protobuf enum {@code SubmissionStateProto} |
| */ |
| public enum SubmissionStateProto |
| implements com.google.protobuf.ProtocolMessageEnum { |
| /** |
| * <code>ACCEPTED = 1;</code> |
| */ |
| ACCEPTED(0, 1), |
| /** |
| * <code>REJECTED = 2;</code> |
| */ |
| REJECTED(1, 2), |
| /** |
| * <code>EVICTED_OTHER = 3;</code> |
| */ |
| EVICTED_OTHER(2, 3), |
| ; |
| |
| /** |
| * <code>ACCEPTED = 1;</code> |
| */ |
| public static final int ACCEPTED_VALUE = 1; |
| /** |
| * <code>REJECTED = 2;</code> |
| */ |
| public static final int REJECTED_VALUE = 2; |
| /** |
| * <code>EVICTED_OTHER = 3;</code> |
| */ |
| public static final int EVICTED_OTHER_VALUE = 3; |
| |
| |
| public final int getNumber() { return value; } |
| |
| public static SubmissionStateProto valueOf(int value) { |
| switch (value) { |
| case 1: return ACCEPTED; |
| case 2: return REJECTED; |
| case 3: return EVICTED_OTHER; |
| default: return null; |
| } |
| } |
| |
| public static com.google.protobuf.Internal.EnumLiteMap<SubmissionStateProto> |
| internalGetValueMap() { |
| return internalValueMap; |
| } |
| private static com.google.protobuf.Internal.EnumLiteMap<SubmissionStateProto> |
| internalValueMap = |
| new com.google.protobuf.Internal.EnumLiteMap<SubmissionStateProto>() { |
| public SubmissionStateProto findValueByNumber(int number) { |
| return SubmissionStateProto.valueOf(number); |
| } |
| }; |
| |
| public final com.google.protobuf.Descriptors.EnumValueDescriptor |
| getValueDescriptor() { |
| return getDescriptor().getValues().get(index); |
| } |
| public final com.google.protobuf.Descriptors.EnumDescriptor |
| getDescriptorForType() { |
| return getDescriptor(); |
| } |
| public static final com.google.protobuf.Descriptors.EnumDescriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.getDescriptor().getEnumTypes().get(1); |
| } |
| |
| private static final SubmissionStateProto[] VALUES = values(); |
| |
| public static SubmissionStateProto valueOf( |
| com.google.protobuf.Descriptors.EnumValueDescriptor desc) { |
| if (desc.getType() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "EnumValueDescriptor is not for this type."); |
| } |
| return VALUES[desc.getIndex()]; |
| } |
| |
| private final int index; |
| private final int value; |
| |
| private SubmissionStateProto(int index, int value) { |
| this.index = index; |
| this.value = value; |
| } |
| |
| // @@protoc_insertion_point(enum_scope:SubmissionStateProto) |
| } |
| |
  /**
   * Read-only accessor interface for {@code UserPayloadProto}, implemented by
   * both the immutable message and its Builder (standard protoc pattern).
   */
  public interface UserPayloadProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes user_payload = 1;
    /**
     * <code>optional bytes user_payload = 1;</code>
     */
    boolean hasUserPayload();
    /**
     * <code>optional bytes user_payload = 1;</code>
     */
    com.google.protobuf.ByteString getUserPayload();

    // optional int32 version = 2;
    /**
     * <code>optional int32 version = 2;</code>
     */
    boolean hasVersion();
    /**
     * <code>optional int32 version = 2;</code>
     */
    int getVersion();
  }
| /** |
| * Protobuf type {@code UserPayloadProto} |
| */ |
| public static final class UserPayloadProto extends |
| com.google.protobuf.GeneratedMessage |
| implements UserPayloadProtoOrBuilder { |
| // Use UserPayloadProto.newBuilder() to construct. |
| private UserPayloadProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private UserPayloadProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final UserPayloadProto defaultInstance; |
| public static UserPayloadProto getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public UserPayloadProto getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private UserPayloadProto( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 10: { |
| bitField0_ |= 0x00000001; |
| userPayload_ = input.readBytes(); |
| break; |
| } |
| case 16: { |
| bitField0_ |= 0x00000002; |
| version_ = input.readInt32(); |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UserPayloadProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UserPayloadProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<UserPayloadProto> PARSER = |
| new com.google.protobuf.AbstractParser<UserPayloadProto>() { |
| public UserPayloadProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new UserPayloadProto(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<UserPayloadProto> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // optional bytes user_payload = 1; |
| public static final int USER_PAYLOAD_FIELD_NUMBER = 1; |
| private com.google.protobuf.ByteString userPayload_; |
| /** |
| * <code>optional bytes user_payload = 1;</code> |
| */ |
| public boolean hasUserPayload() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional bytes user_payload = 1;</code> |
| */ |
| public com.google.protobuf.ByteString getUserPayload() { |
| return userPayload_; |
| } |
| |
| // optional int32 version = 2; |
| public static final int VERSION_FIELD_NUMBER = 2; |
| private int version_; |
| /** |
| * <code>optional int32 version = 2;</code> |
| */ |
| public boolean hasVersion() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int32 version = 2;</code> |
| */ |
| public int getVersion() { |
| return version_; |
| } |
| |
| private void initFields() { |
| userPayload_ = com.google.protobuf.ByteString.EMPTY; |
| version_ = 0; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeBytes(1, userPayload_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| output.writeInt32(2, version_); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(1, userPayload_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeInt32Size(2, version_); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto) obj; |
| |
| boolean result = true; |
| result = result && (hasUserPayload() == other.hasUserPayload()); |
| if (hasUserPayload()) { |
| result = result && getUserPayload() |
| .equals(other.getUserPayload()); |
| } |
| result = result && (hasVersion() == other.hasVersion()); |
| if (hasVersion()) { |
| result = result && (getVersion() |
| == other.getVersion()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasUserPayload()) { |
| hash = (37 * hash) + USER_PAYLOAD_FIELD_NUMBER; |
| hash = (53 * hash) + getUserPayload().hashCode(); |
| } |
| if (hasVersion()) { |
| hash = (37 * hash) + VERSION_FIELD_NUMBER; |
| hash = (53 * hash) + getVersion(); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
| public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto prototype) { |
| return newBuilder().mergeFrom(prototype); |
| } |
| public Builder toBuilder() { return newBuilder(this); } |
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code UserPayloadProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UserPayloadProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UserPayloadProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| userPayload_ = com.google.protobuf.ByteString.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| version_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_UserPayloadProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.userPayload_ = userPayload_; |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.version_ = version_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance()) return this; |
| if (other.hasUserPayload()) { |
| setUserPayload(other.getUserPayload()); |
| } |
| if (other.hasVersion()) { |
| setVersion(other.getVersion()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional bytes user_payload = 1; |
| private com.google.protobuf.ByteString userPayload_ = com.google.protobuf.ByteString.EMPTY; |
| /** |
| * <code>optional bytes user_payload = 1;</code> |
| */ |
| public boolean hasUserPayload() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional bytes user_payload = 1;</code> |
| */ |
| public com.google.protobuf.ByteString getUserPayload() { |
| return userPayload_; |
| } |
| /** |
| * <code>optional bytes user_payload = 1;</code> |
| */ |
| public Builder setUserPayload(com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| userPayload_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional bytes user_payload = 1;</code> |
| */ |
| public Builder clearUserPayload() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| userPayload_ = getDefaultInstance().getUserPayload(); |
| onChanged(); |
| return this; |
| } |
| |
| // optional int32 version = 2; |
| private int version_ ; |
| /** |
| * <code>optional int32 version = 2;</code> |
| */ |
| public boolean hasVersion() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int32 version = 2;</code> |
| */ |
| public int getVersion() { |
| return version_; |
| } |
| /** |
| * <code>optional int32 version = 2;</code> |
| */ |
| public Builder setVersion(int value) { |
| bitField0_ |= 0x00000002; |
| version_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 version = 2;</code> |
| */ |
| public Builder clearVersion() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| version_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:UserPayloadProto) |
| } |
| |
| static { |
| defaultInstance = new UserPayloadProto(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:UserPayloadProto) |
| } |
| |
  /**
   * Read-only accessor interface for {@code EntityDescriptorProto},
   * implemented by both the immutable message and its Builder (standard
   * protoc pattern).
   */
  public interface EntityDescriptorProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string class_name = 1;
    /**
     * <code>optional string class_name = 1;</code>
     */
    boolean hasClassName();
    /**
     * <code>optional string class_name = 1;</code>
     */
    java.lang.String getClassName();
    /**
     * <code>optional string class_name = 1;</code>
     */
    com.google.protobuf.ByteString
        getClassNameBytes();

    // optional .UserPayloadProto user_payload = 2;
    /**
     * <code>optional .UserPayloadProto user_payload = 2;</code>
     */
    boolean hasUserPayload();
    /**
     * <code>optional .UserPayloadProto user_payload = 2;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto getUserPayload();
    /**
     * <code>optional .UserPayloadProto user_payload = 2;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder getUserPayloadOrBuilder();

    // optional bytes history_text = 3;
    /**
     * <code>optional bytes history_text = 3;</code>
     */
    boolean hasHistoryText();
    /**
     * <code>optional bytes history_text = 3;</code>
     */
    com.google.protobuf.ByteString getHistoryText();
  }
| /** |
| * Protobuf type {@code EntityDescriptorProto} |
| */ |
| public static final class EntityDescriptorProto extends |
| com.google.protobuf.GeneratedMessage |
| implements EntityDescriptorProtoOrBuilder { |
    // Use EntityDescriptorProto.newBuilder() to construct.
    private EntityDescriptorProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the default-instance singleton; skips normal construction.
    private EntityDescriptorProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final EntityDescriptorProto defaultInstance;
    public static EntityDescriptorProto getDefaultInstance() {
      return defaultInstance;
    }

    public EntityDescriptorProto getDefaultInstanceForType() {
      return defaultInstance;
    }
| |
    // Fields from the wire that this schema version does not know about;
    // preserved so they round-trip on reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor, invoked via PARSER. Reads tags until
     * EOF (tag 0); unknown tags are preserved in {@code unknownFields}.
     * Note: the {@code default:} arm precedes the {@code case} arms — legal
     * Java (every arm breaks), just an artifact of the code generator.
     */
    private EntityDescriptorProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1, length-delimited: class_name (raw bytes; lazily decoded to String)
              bitField0_ |= 0x00000001;
              className_ = input.readBytes();
              break;
            }
            case 18: {  // field 2, length-delimited: user_payload sub-message
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                // Field already seen: proto2 semantics merge repeated
                // occurrences of a sub-message rather than replacing it.
                subBuilder = userPayload_.toBuilder();
              }
              userPayload_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(userPayload_);
                userPayload_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 26: {  // field 3, length-delimited: history_text
              bitField0_ |= 0x00000004;
              historyText_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Reflection plumbing: descriptor and field-accessor table for this
    // message type, resolved from the enclosing file descriptor.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_EntityDescriptorProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_EntityDescriptorProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder.class);
    }
| |
    // NOTE(review): public mutable static field is a known wart of this
    // protoc version's output; callers should use getParserForType().
    public static com.google.protobuf.Parser<EntityDescriptorProto> PARSER =
        new com.google.protobuf.AbstractParser<EntityDescriptorProto>() {
      public EntityDescriptorProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new EntityDescriptorProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<EntityDescriptorProto> getParserForType() {
      return PARSER;
    }
| |
    // Presence bits: 0x1 = class_name, 0x2 = user_payload, 0x4 = history_text.
    private int bitField0_;
    // optional string class_name = 1;
    public static final int CLASS_NAME_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString: parsed as raw bytes off the
    // wire and lazily decoded/cached as a String on first access.
    private java.lang.Object className_;
    /**
     * <code>optional string class_name = 1;</code>
     */
    public boolean hasClassName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string class_name = 1;</code>
     */
    public java.lang.String getClassName() {
      java.lang.Object ref = className_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          // Cache the decoded form only when it is valid UTF-8, so the
          // original bytes survive for reserialization otherwise.
          className_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string class_name = 1;</code>
     */
    public com.google.protobuf.ByteString
        getClassNameBytes() {
      java.lang.Object ref = className_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        className_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional .UserPayloadProto user_payload = 2;
    public static final int USER_PAYLOAD_FIELD_NUMBER = 2;
    // Sub-message field; initFields() sets it to the UserPayloadProto
    // default instance, so it is never null.
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto userPayload_;
    /**
     * <code>optional .UserPayloadProto user_payload = 2;</code>
     */
    public boolean hasUserPayload() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .UserPayloadProto user_payload = 2;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto getUserPayload() {
      return userPayload_;
    }
    /**
     * <code>optional .UserPayloadProto user_payload = 2;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder getUserPayloadOrBuilder() {
      return userPayload_;
    }
| |
    // optional bytes history_text = 3;
    public static final int HISTORY_TEXT_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString historyText_;
    /**
     * <code>optional bytes history_text = 3;</code>
     */
    public boolean hasHistoryText() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes history_text = 3;</code>
     */
    public com.google.protobuf.ByteString getHistoryText() {
      return historyText_;
    }
| |
    // Resets fields to their proto defaults; called before parsing.
    private void initFields() {
      className_ = "";
      userPayload_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance();
      historyText_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. Always true here
    // since this message has no required fields.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes the set fields in ascending field-number order, followed by
    // any unknown fields captured during parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Computes and memoizes the serialized size up front so nested message
      // length prefixes are available during the write.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getClassNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, userPayload_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, historyText_);
      }
      getUnknownFields().writeTo(output);
    }
| |
    // Cached wire size of this message; -1 means "not yet computed". Safe to
    // cache because the message is immutable after construction.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sums the encoded size of each present field plus unknown fields.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getClassNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, userPayload_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, historyText_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement
    // object so proto messages serialize via their wire format.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto) obj; |
| |
| boolean result = true; |
| result = result && (hasClassName() == other.hasClassName()); |
| if (hasClassName()) { |
| result = result && getClassName() |
| .equals(other.getClassName()); |
| } |
| result = result && (hasUserPayload() == other.hasUserPayload()); |
| if (hasUserPayload()) { |
| result = result && getUserPayload() |
| .equals(other.getUserPayload()); |
| } |
| result = result && (hasHistoryText() == other.hasHistoryText()); |
| if (hasHistoryText()) { |
| result = result && getHistoryText() |
| .equals(other.getHistoryText()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasClassName()) { |
| hash = (37 * hash) + CLASS_NAME_FIELD_NUMBER; |
| hash = (53 * hash) + getClassName().hashCode(); |
| } |
| if (hasUserPayload()) { |
| hash = (37 * hash) + USER_PAYLOAD_FIELD_NUMBER; |
| hash = (53 * hash) + getUserPayload().hashCode(); |
| } |
| if (hasHistoryText()) { |
| hash = (37 * hash) + HISTORY_TEXT_FIELD_NUMBER; |
| hash = (53 * hash) + getHistoryText().hashCode(); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
    // Static parsing entry points; every overload delegates to the shared
    // PARSER instance.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a builder pre-populated with a copy of prototype's fields.
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Creates a builder attached to a parent (used for nested-builder change
    // propagation inside the protobuf runtime).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code EntityDescriptorProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_EntityDescriptorProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_EntityDescriptorProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| getUserPayloadFieldBuilder(); |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| className_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| if (userPayloadBuilder_ == null) { |
| userPayload_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance(); |
| } else { |
| userPayloadBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000002); |
| historyText_ = com.google.protobuf.ByteString.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000004); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_EntityDescriptorProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.className_ = className_; |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| if (userPayloadBuilder_ == null) { |
| result.userPayload_ = userPayload_; |
| } else { |
| result.userPayload_ = userPayloadBuilder_.build(); |
| } |
| if (((from_bitField0_ & 0x00000004) == 0x00000004)) { |
| to_bitField0_ |= 0x00000004; |
| } |
| result.historyText_ = historyText_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) return this; |
| if (other.hasClassName()) { |
| bitField0_ |= 0x00000001; |
| className_ = other.className_; |
| onChanged(); |
| } |
| if (other.hasUserPayload()) { |
| mergeUserPayload(other.getUserPayload()); |
| } |
| if (other.hasHistoryText()) { |
| setHistoryText(other.getHistoryText()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional string class_name = 1; |
| private java.lang.Object className_ = ""; |
| /** |
| * <code>optional string class_name = 1;</code> |
| */ |
| public boolean hasClassName() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional string class_name = 1;</code> |
| */ |
| public java.lang.String getClassName() { |
| java.lang.Object ref = className_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| className_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>optional string class_name = 1;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getClassNameBytes() { |
| java.lang.Object ref = className_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| className_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>optional string class_name = 1;</code> |
| */ |
| public Builder setClassName( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| className_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string class_name = 1;</code> |
| */ |
| public Builder clearClassName() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| className_ = getDefaultInstance().getClassName(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string class_name = 1;</code> |
| */ |
| public Builder setClassNameBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| className_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // optional .UserPayloadProto user_payload = 2; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto userPayload_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance(); |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder> userPayloadBuilder_; |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public boolean hasUserPayload() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto getUserPayload() { |
| if (userPayloadBuilder_ == null) { |
| return userPayload_; |
| } else { |
| return userPayloadBuilder_.getMessage(); |
| } |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public Builder setUserPayload(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto value) { |
| if (userPayloadBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| userPayload_ = value; |
| onChanged(); |
| } else { |
| userPayloadBuilder_.setMessage(value); |
| } |
| bitField0_ |= 0x00000002; |
| return this; |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public Builder setUserPayload( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder builderForValue) { |
| if (userPayloadBuilder_ == null) { |
| userPayload_ = builderForValue.build(); |
| onChanged(); |
| } else { |
| userPayloadBuilder_.setMessage(builderForValue.build()); |
| } |
| bitField0_ |= 0x00000002; |
| return this; |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public Builder mergeUserPayload(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto value) { |
| if (userPayloadBuilder_ == null) { |
| if (((bitField0_ & 0x00000002) == 0x00000002) && |
| userPayload_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance()) { |
| userPayload_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.newBuilder(userPayload_).mergeFrom(value).buildPartial(); |
| } else { |
| userPayload_ = value; |
| } |
| onChanged(); |
| } else { |
| userPayloadBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000002; |
| return this; |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public Builder clearUserPayload() { |
| if (userPayloadBuilder_ == null) { |
| userPayload_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.getDefaultInstance(); |
| onChanged(); |
| } else { |
| userPayloadBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000002); |
| return this; |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder getUserPayloadBuilder() { |
| bitField0_ |= 0x00000002; |
| onChanged(); |
| return getUserPayloadFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder getUserPayloadOrBuilder() { |
| if (userPayloadBuilder_ != null) { |
| return userPayloadBuilder_.getMessageOrBuilder(); |
| } else { |
| return userPayload_; |
| } |
| } |
| /** |
| * <code>optional .UserPayloadProto user_payload = 2;</code> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder> |
| getUserPayloadFieldBuilder() { |
| if (userPayloadBuilder_ == null) { |
| userPayloadBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.UserPayloadProtoOrBuilder>( |
| userPayload_, |
| getParentForChildren(), |
| isClean()); |
| userPayload_ = null; |
| } |
| return userPayloadBuilder_; |
| } |
| |
| // optional bytes history_text = 3; |
| private com.google.protobuf.ByteString historyText_ = com.google.protobuf.ByteString.EMPTY; |
| /** |
| * <code>optional bytes history_text = 3;</code> |
| */ |
| public boolean hasHistoryText() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional bytes history_text = 3;</code> |
| */ |
| public com.google.protobuf.ByteString getHistoryText() { |
| return historyText_; |
| } |
| /** |
| * <code>optional bytes history_text = 3;</code> |
| */ |
| public Builder setHistoryText(com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000004; |
| historyText_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional bytes history_text = 3;</code> |
| */ |
| public Builder clearHistoryText() { |
| bitField0_ = (bitField0_ & ~0x00000004); |
| historyText_ = getDefaultInstance().getHistoryText(); |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:EntityDescriptorProto) |
| } |
| |
    // Creates the singleton default instance; the noInit constructor skips
    // parsing, so initFields() supplies the default field values.
    static {
      defaultInstance = new EntityDescriptorProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:EntityDescriptorProto) |
| } |
| |
  /**
   * Read-only accessor interface implemented by both {@code IOSpecProto} and
   * its {@code Builder}.
   */
  public interface IOSpecProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string connected_vertex_name = 1;
    /**
     * <code>optional string connected_vertex_name = 1;</code>
     */
    boolean hasConnectedVertexName();
    /**
     * <code>optional string connected_vertex_name = 1;</code>
     */
    java.lang.String getConnectedVertexName();
    /**
     * <code>optional string connected_vertex_name = 1;</code>
     */
    com.google.protobuf.ByteString
        getConnectedVertexNameBytes();

    // optional .EntityDescriptorProto io_descriptor = 2;
    /**
     * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
     */
    boolean hasIoDescriptor();
    /**
     * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getIoDescriptor();
    /**
     * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getIoDescriptorOrBuilder();

    // optional int32 physical_edge_count = 3;
    /**
     * <code>optional int32 physical_edge_count = 3;</code>
     */
    boolean hasPhysicalEdgeCount();
    /**
     * <code>optional int32 physical_edge_count = 3;</code>
     */
    int getPhysicalEdgeCount();
  }
| /** |
| * Protobuf type {@code IOSpecProto} |
| */ |
| public static final class IOSpecProto extends |
| com.google.protobuf.GeneratedMessage |
| implements IOSpecProtoOrBuilder { |
    // Use IOSpecProto.newBuilder() to construct.
    private IOSpecProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the singleton default instance; presumably a static
    // initializer (outside this chunk) calls initFields() afterwards, as the
    // EntityDescriptorProto one does -- confirm.
    private IOSpecProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
| |
    // Singleton default instance; assigned in a static initializer outside
    // this chunk (mirroring the EntityDescriptorProto pattern -- confirm).
    private static final IOSpecProto defaultInstance;
    public static IOSpecProto getDefaultInstance() {
      return defaultInstance;
    }

    public IOSpecProto getDefaultInstanceForType() {
      return defaultInstance;
    }
| |
    // Fields with unrecognized tags captured during parsing; preserved so
    // they are re-emitted by writeTo().
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parses a serialized IOSpecProto from the wire. Reads tags until tag 0
    // (end of stream) or parseUnknownField signals termination; unrecognized
    // fields are kept in unknownFields. Note: the switch places "default"
    // before the field cases -- legal Java, dispatch is by tag value.
    private IOSpecProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (connected_vertex_name), wire type 2.
              bitField0_ |= 0x00000001;
              connectedVertexName_ = input.readBytes();
              break;
            }
            case 18: {
              // Field 2 (io_descriptor), wire type 2. If the field was seen
              // before, the new occurrence is merged into the previous one.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = ioDescriptor_.toBuilder();
              }
              ioDescriptor_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(ioDescriptor_);
                ioDescriptor_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 24: {
              // Field 3 (physical_edge_count), wire type 0 (varint).
              bitField0_ |= 0x00000004;
              physicalEdgeCount_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze whatever was parsed, even on error, so the
        // unfinished message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_IOSpecProto_descriptor;
    }

    // Reflection support: binds the descriptor's fields to this class's and
    // its Builder's accessors.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_IOSpecProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder.class);
    }
| |
| public static com.google.protobuf.Parser<IOSpecProto> PARSER = |
| new com.google.protobuf.AbstractParser<IOSpecProto>() { |
| public IOSpecProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new IOSpecProto(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<IOSpecProto> getParserForType() { |
| return PARSER; |
| } |
| |
    // Presence bits: 0x01 = connected_vertex_name, 0x02 = io_descriptor,
    // 0x04 = physical_edge_count.
    private int bitField0_;
    // optional string connected_vertex_name = 1;
    public static final int CONNECTED_VERTEX_NAME_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString; parsing stores the raw
    // bytes and the getters convert lazily, caching where safe.
    private java.lang.Object connectedVertexName_;
    /**
     * <code>optional string connected_vertex_name = 1;</code>
     */
    public boolean hasConnectedVertexName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string connected_vertex_name = 1;</code>
     *
     * @return the field decoded as UTF-8; the decoded String is cached only
     *         when the stored bytes are valid UTF-8
     */
    public java.lang.String getConnectedVertexName() {
      java.lang.Object ref = connectedVertexName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          connectedVertexName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string connected_vertex_name = 1;</code>
     *
     * @return the field as raw bytes; a String value is encoded to UTF-8 and
     *         the ByteString replaces the cached String
     */
    public com.google.protobuf.ByteString
        getConnectedVertexNameBytes() {
      java.lang.Object ref = connectedVertexName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        connectedVertexName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional .EntityDescriptorProto io_descriptor = 2;
    public static final int IO_DESCRIPTOR_FIELD_NUMBER = 2;
    // Defaults to EntityDescriptorProto.getDefaultInstance() via initFields().
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto ioDescriptor_;
    /**
     * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
     *
     * @return true if the field was explicitly set (presence bit 0x02)
     */
    public boolean hasIoDescriptor() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
     *
     * @return the field value; the default instance when unset
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getIoDescriptor() {
      return ioDescriptor_;
    }
    /**
     * <code>optional .EntityDescriptorProto io_descriptor = 2;</code>
     *
     * @return the same object as {@code getIoDescriptor()}
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getIoDescriptorOrBuilder() {
      return ioDescriptor_;
    }
| |
    // optional int32 physical_edge_count = 3;
    public static final int PHYSICAL_EDGE_COUNT_FIELD_NUMBER = 3;
    // Defaults to 0 via initFields(); presence tracked in bit 0x04.
    private int physicalEdgeCount_;
    /**
     * <code>optional int32 physical_edge_count = 3;</code>
     *
     * @return true if the field was explicitly set (presence bit 0x04)
     */
    public boolean hasPhysicalEdgeCount() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional int32 physical_edge_count = 3;</code>
     *
     * @return the field value; 0 when unset
     */
    public int getPhysicalEdgeCount() {
      return physicalEdgeCount_;
    }
| |
    // Resets every field to its proto default value; invoked by the
    // constructors before any parsing populates the fields.
    private void initFields() {
      connectedVertexName_ = "";
      ioDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
      physicalEdgeCount_ = 0;
    }
    // Cached result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Every field of this message is optional, so it is always initialized.
      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes the set fields in ascending field-number order, followed by
    // any unknown fields captured during parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Computes and memoizes the serialized size up front so nested message
      // length prefixes are available during the write.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getConnectedVertexNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, ioDescriptor_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, physicalEdgeCount_);
      }
      getUnknownFields().writeTo(output);
    }
| |
    // Cached wire size of this message; -1 means "not yet computed". Safe to
    // cache because the message is immutable after construction.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sums the encoded size of each present field plus unknown fields.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getConnectedVertexNameBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, ioDescriptor_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, physicalEdgeCount_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's replacement
    // object so proto messages serialize via their wire format.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto) obj; |
| |
| boolean result = true; |
| result = result && (hasConnectedVertexName() == other.hasConnectedVertexName()); |
| if (hasConnectedVertexName()) { |
| result = result && getConnectedVertexName() |
| .equals(other.getConnectedVertexName()); |
| } |
| result = result && (hasIoDescriptor() == other.hasIoDescriptor()); |
| if (hasIoDescriptor()) { |
| result = result && getIoDescriptor() |
| .equals(other.getIoDescriptor()); |
| } |
| result = result && (hasPhysicalEdgeCount() == other.hasPhysicalEdgeCount()); |
| if (hasPhysicalEdgeCount()) { |
| result = result && (getPhysicalEdgeCount() |
| == other.getPhysicalEdgeCount()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasConnectedVertexName()) { |
| hash = (37 * hash) + CONNECTED_VERTEX_NAME_FIELD_NUMBER; |
| hash = (53 * hash) + getConnectedVertexName().hashCode(); |
| } |
| if (hasIoDescriptor()) { |
| hash = (37 * hash) + IO_DESCRIPTOR_FIELD_NUMBER; |
| hash = (53 * hash) + getIoDescriptor().hashCode(); |
| } |
| if (hasPhysicalEdgeCount()) { |
| hash = (37 * hash) + PHYSICAL_EDGE_COUNT_FIELD_NUMBER; |
| hash = (53 * hash) + getPhysicalEdgeCount(); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
    // Static parse entry points for every supported input form; all delegate
    // to the message's PARSER instance (defined earlier in this class).
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body,
    // allowing multiple messages on one stream.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factories: toBuilder()/newBuilder(prototype) seed the builder
    // with a copy of an existing message's fields.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code IOSpecProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_IOSpecProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_IOSpecProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| getIoDescriptorFieldBuilder(); |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| connectedVertexName_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| if (ioDescriptorBuilder_ == null) { |
| ioDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| } else { |
| ioDescriptorBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000002); |
| physicalEdgeCount_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000004); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_IOSpecProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.connectedVertexName_ = connectedVertexName_; |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| if (ioDescriptorBuilder_ == null) { |
| result.ioDescriptor_ = ioDescriptor_; |
| } else { |
| result.ioDescriptor_ = ioDescriptorBuilder_.build(); |
| } |
| if (((from_bitField0_ & 0x00000004) == 0x00000004)) { |
| to_bitField0_ |= 0x00000004; |
| } |
| result.physicalEdgeCount_ = physicalEdgeCount_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance()) return this; |
| if (other.hasConnectedVertexName()) { |
| bitField0_ |= 0x00000001; |
| connectedVertexName_ = other.connectedVertexName_; |
| onChanged(); |
| } |
| if (other.hasIoDescriptor()) { |
| mergeIoDescriptor(other.getIoDescriptor()); |
| } |
| if (other.hasPhysicalEdgeCount()) { |
| setPhysicalEdgeCount(other.getPhysicalEdgeCount()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional string connected_vertex_name = 1; |
| private java.lang.Object connectedVertexName_ = ""; |
| /** |
| * <code>optional string connected_vertex_name = 1;</code> |
| */ |
| public boolean hasConnectedVertexName() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional string connected_vertex_name = 1;</code> |
| */ |
| public java.lang.String getConnectedVertexName() { |
| java.lang.Object ref = connectedVertexName_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| connectedVertexName_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>optional string connected_vertex_name = 1;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getConnectedVertexNameBytes() { |
| java.lang.Object ref = connectedVertexName_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| connectedVertexName_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>optional string connected_vertex_name = 1;</code> |
| */ |
| public Builder setConnectedVertexName( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| connectedVertexName_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string connected_vertex_name = 1;</code> |
| */ |
| public Builder clearConnectedVertexName() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| connectedVertexName_ = getDefaultInstance().getConnectedVertexName(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string connected_vertex_name = 1;</code> |
| */ |
| public Builder setConnectedVertexNameBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| connectedVertexName_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // optional .EntityDescriptorProto io_descriptor = 2; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto ioDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> ioDescriptorBuilder_; |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public boolean hasIoDescriptor() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getIoDescriptor() { |
| if (ioDescriptorBuilder_ == null) { |
| return ioDescriptor_; |
| } else { |
| return ioDescriptorBuilder_.getMessage(); |
| } |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public Builder setIoDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) { |
| if (ioDescriptorBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| ioDescriptor_ = value; |
| onChanged(); |
| } else { |
| ioDescriptorBuilder_.setMessage(value); |
| } |
| bitField0_ |= 0x00000002; |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public Builder setIoDescriptor( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder builderForValue) { |
| if (ioDescriptorBuilder_ == null) { |
| ioDescriptor_ = builderForValue.build(); |
| onChanged(); |
| } else { |
| ioDescriptorBuilder_.setMessage(builderForValue.build()); |
| } |
| bitField0_ |= 0x00000002; |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public Builder mergeIoDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) { |
| if (ioDescriptorBuilder_ == null) { |
| if (((bitField0_ & 0x00000002) == 0x00000002) && |
| ioDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) { |
| ioDescriptor_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(ioDescriptor_).mergeFrom(value).buildPartial(); |
| } else { |
| ioDescriptor_ = value; |
| } |
| onChanged(); |
| } else { |
| ioDescriptorBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000002; |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public Builder clearIoDescriptor() { |
| if (ioDescriptorBuilder_ == null) { |
| ioDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| onChanged(); |
| } else { |
| ioDescriptorBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000002); |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder getIoDescriptorBuilder() { |
| bitField0_ |= 0x00000002; |
| onChanged(); |
| return getIoDescriptorFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getIoDescriptorOrBuilder() { |
| if (ioDescriptorBuilder_ != null) { |
| return ioDescriptorBuilder_.getMessageOrBuilder(); |
| } else { |
| return ioDescriptor_; |
| } |
| } |
| /** |
| * <code>optional .EntityDescriptorProto io_descriptor = 2;</code> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> |
| getIoDescriptorFieldBuilder() { |
| if (ioDescriptorBuilder_ == null) { |
| ioDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder>( |
| ioDescriptor_, |
| getParentForChildren(), |
| isClean()); |
| ioDescriptor_ = null; |
| } |
| return ioDescriptorBuilder_; |
| } |
| |
| // optional int32 physical_edge_count = 3; |
| private int physicalEdgeCount_ ; |
| /** |
| * <code>optional int32 physical_edge_count = 3;</code> |
| */ |
| public boolean hasPhysicalEdgeCount() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional int32 physical_edge_count = 3;</code> |
| */ |
| public int getPhysicalEdgeCount() { |
| return physicalEdgeCount_; |
| } |
| /** |
| * <code>optional int32 physical_edge_count = 3;</code> |
| */ |
| public Builder setPhysicalEdgeCount(int value) { |
| bitField0_ |= 0x00000004; |
| physicalEdgeCount_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 physical_edge_count = 3;</code> |
| */ |
| public Builder clearPhysicalEdgeCount() { |
| bitField0_ = (bitField0_ & ~0x00000004); |
| physicalEdgeCount_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:IOSpecProto) |
| } |
| |
    // Eagerly creates the shared immutable default (all-fields-unset)
    // instance returned by getDefaultInstance().
    static {
      defaultInstance = new IOSpecProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:IOSpecProto) |
| } |
| |
  /**
   * Read-only accessor interface for {@code GroupInputSpecProto}, implemented
   * by both the immutable message and its Builder. Generated code — do not
   * hand-edit; regenerate from src/protobuf/LlapDaemonProtocol.proto.
   */
  public interface GroupInputSpecProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string group_name = 1;
    /**
     * <code>optional string group_name = 1;</code>
     */
    boolean hasGroupName();
    /**
     * <code>optional string group_name = 1;</code>
     */
    java.lang.String getGroupName();
    /**
     * <code>optional string group_name = 1;</code>
     */
    com.google.protobuf.ByteString
        getGroupNameBytes();

    // repeated string group_vertices = 2;
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    java.util.List<java.lang.String>
    getGroupVerticesList();
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    int getGroupVerticesCount();
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    java.lang.String getGroupVertices(int index);
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    com.google.protobuf.ByteString
        getGroupVerticesBytes(int index);

    // optional .EntityDescriptorProto merged_input_descriptor = 3;
    /**
     * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
     */
    boolean hasMergedInputDescriptor();
    /**
     * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getMergedInputDescriptor();
    /**
     * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getMergedInputDescriptorOrBuilder();
  }
| /** |
| * Protobuf type {@code GroupInputSpecProto} |
| */ |
| public static final class GroupInputSpecProto extends |
| com.google.protobuf.GeneratedMessage |
| implements GroupInputSpecProtoOrBuilder { |
    // Use GroupInputSpecProto.newBuilder() to construct.
    private GroupInputSpecProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path: used only by the static initializer to create the shared
    // default instance without touching descriptors.
    private GroupInputSpecProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GroupInputSpecProto defaultInstance;
    public static GroupInputSpecProto getDefaultInstance() {
      return defaultInstance;
    }

    public GroupInputSpecProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields from the wire that this schema version does not recognize are
    // preserved here and round-tripped on serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0) or an unparseable unknown field. Tags: 10 = group_name (field 1,
    // length-delimited), 18 = group_vertices (field 2, repeated), 26 =
    // merged_input_descriptor (field 3, message).
    private GroupInputSpecProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              // Unrecognized tag: preserve in unknownFields; stop if it
              // cannot be skipped.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              groupName_ = input.readBytes();
              break;
            }
            case 18: {
              // First occurrence allocates the repeated-field list;
              // mutable_bitField0_ tracks that allocation, not presence.
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                groupVertices_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000002;
              }
              groupVertices_.add(input.readBytes());
              break;
            }
            case 26: {
              // If the field was already seen, merge the new value into the
              // old one per protobuf last-message-merges semantics.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = mergedInputDescriptor_.toBuilder();
              }
              mergedInputDescriptor_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(mergedInputDescriptor_);
                mergedInputDescriptor_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated field and unknown-field set even on failure so
        // the unfinished message attached to the exception is consistent.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          groupVertices_ = new com.google.protobuf.UnmodifiableLazyStringList(groupVertices_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GroupInputSpecProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GroupInputSpecProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder.class);
    }

    // Stateless parser shared by all parseFrom entry points; delegates to the
    // wire-parsing constructor above.
    public static com.google.protobuf.Parser<GroupInputSpecProto> PARSER =
        new com.google.protobuf.AbstractParser<GroupInputSpecProto>() {
      public GroupInputSpecProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GroupInputSpecProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GroupInputSpecProto> getParserForType() {
      return PARSER;
    }
| |
    // Presence bits: 0x1 = group_name, 0x2 = merged_input_descriptor.
    // (Repeated group_vertices has no presence bit; emptiness is its state.)
    private int bitField0_;
    // optional string group_name = 1;
    public static final int GROUP_NAME_FIELD_NUMBER = 1;
    // Holds either a String or a wire-format ByteString; converted and
    // cached on first String access.
    private java.lang.Object groupName_;
    /**
     * <code>optional string group_name = 1;</code>
     */
    public boolean hasGroupName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string group_name = 1;</code>
     */
    public java.lang.String getGroupName() {
      java.lang.Object ref = groupName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the decoded String when the bytes were valid UTF-8,
        // so invalid input keeps round-tripping the original bytes.
        if (bs.isValidUtf8()) {
          groupName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string group_name = 1;</code>
     */
    public com.google.protobuf.ByteString
        getGroupNameBytes() {
      java.lang.Object ref = groupName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        groupName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // repeated string group_vertices = 2;
    public static final int GROUP_VERTICES_FIELD_NUMBER = 2;
    // Immutable after construction (wrapped in UnmodifiableLazyStringList
    // by the parsing constructor / initFields).
    private com.google.protobuf.LazyStringList groupVertices_;
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    public java.util.List<java.lang.String>
    getGroupVerticesList() {
      return groupVertices_;
    }
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    public int getGroupVerticesCount() {
      return groupVertices_.size();
    }
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    public java.lang.String getGroupVertices(int index) {
      return groupVertices_.get(index);
    }
    /**
     * <code>repeated string group_vertices = 2;</code>
     */
    public com.google.protobuf.ByteString
        getGroupVerticesBytes(int index) {
      return groupVertices_.getByteString(index);
    }

    // optional .EntityDescriptorProto merged_input_descriptor = 3;
    public static final int MERGED_INPUT_DESCRIPTOR_FIELD_NUMBER = 3;
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto mergedInputDescriptor_;
    /**
     * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
     */
    public boolean hasMergedInputDescriptor() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getMergedInputDescriptor() {
      return mergedInputDescriptor_;
    }
    /**
     * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getMergedInputDescriptorOrBuilder() {
      return mergedInputDescriptor_;
    }
| |
    // Sets every field to its proto default (empty string / empty list /
    // default nested message).
    private void initFields() {
      groupName_ = "";
      groupVertices_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      mergedInputDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
    }
    // -1 = not yet computed, 0 = false, 1 = true. All fields are optional,
    // so the message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes present fields in field-number order; unknown fields are
    // appended so they survive a parse/serialize round trip.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getGroupNameBytes());
      }
      for (int i = 0; i < groupVertices_.size(); i++) {
        output.writeBytes(2, groupVertices_.getByteString(i));
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(3, mergedInputDescriptor_);
      }
      getUnknownFields().writeTo(output);
    }

    // Wire size is computed once and cached (-1 = not yet computed).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getGroupNameBytes());
      }
      {
        int dataSize = 0;
        for (int i = 0; i < groupVertices_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(groupVertices_.getByteString(i));
        }
        size += dataSize;
        // One byte of tag overhead per repeated element (field 2 tag fits
        // in a single byte).
        size += 1 * getGroupVerticesList().size();
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, mergedInputDescriptor_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    // Java serialization is routed through GeneratedMessage's
    // serialized-proxy mechanism via writeReplace().
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Structural equality over presence, field values (including the full
    // repeated list), and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto) obj;

      boolean result = true;
      result = result && (hasGroupName() == other.hasGroupName());
      if (hasGroupName()) {
        result = result && getGroupName()
            .equals(other.getGroupName());
      }
      result = result && getGroupVerticesList()
          .equals(other.getGroupVerticesList());
      result = result && (hasMergedInputDescriptor() == other.hasMergedInputDescriptor());
      if (hasMergedInputDescriptor()) {
        result = result && getMergedInputDescriptor()
            .equals(other.getMergedInputDescriptor());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash is computed lazily and cached; 0 means "not yet computed".
    // Consistent with equals: only present/non-empty fields contribute.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasGroupName()) {
        hash = (37 * hash) + GROUP_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getGroupName().hashCode();
      }
      if (getGroupVerticesCount() > 0) {
        hash = (37 * hash) + GROUP_VERTICES_FIELD_NUMBER;
        hash = (53 * hash) + getGroupVerticesList().hashCode();
      }
      if (hasMergedInputDescriptor()) {
        hash = (37 * hash) + MERGED_INPUT_DESCRIPTOR_FIELD_NUMBER;
        hash = (53 * hash) + getMergedInputDescriptor().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // ---------------------------------------------------------------------
    // Static parsing entry points. All overloads delegate to PARSER; the
    // byte/ByteString variants throw InvalidProtocolBufferException on
    // malformed input, while the stream variants additionally surface
    // IOException from the underlying stream. The "Delimited" variants read
    // a varint length prefix first, allowing several messages per stream.
    // ---------------------------------------------------------------------
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Creates a fresh, empty Builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a Builder pre-populated with a copy of {@code prototype}'s fields.
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Internal hook: creates a Builder attached to a parent so nested-builder
    // changes propagate upward (used by the generated runtime, not callers).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code GroupInputSpecProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GroupInputSpecProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GroupInputSpecProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| getMergedInputDescriptorFieldBuilder(); |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| groupName_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| groupVertices_ = com.google.protobuf.LazyStringArrayList.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| if (mergedInputDescriptorBuilder_ == null) { |
| mergedInputDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| } else { |
| mergedInputDescriptorBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000004); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GroupInputSpecProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.groupName_ = groupName_; |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| groupVertices_ = new com.google.protobuf.UnmodifiableLazyStringList( |
| groupVertices_); |
| bitField0_ = (bitField0_ & ~0x00000002); |
| } |
| result.groupVertices_ = groupVertices_; |
| if (((from_bitField0_ & 0x00000004) == 0x00000004)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| if (mergedInputDescriptorBuilder_ == null) { |
| result.mergedInputDescriptor_ = mergedInputDescriptor_; |
| } else { |
| result.mergedInputDescriptor_ = mergedInputDescriptorBuilder_.build(); |
| } |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.getDefaultInstance()) return this; |
| if (other.hasGroupName()) { |
| bitField0_ |= 0x00000001; |
| groupName_ = other.groupName_; |
| onChanged(); |
| } |
| if (!other.groupVertices_.isEmpty()) { |
| if (groupVertices_.isEmpty()) { |
| groupVertices_ = other.groupVertices_; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| } else { |
| ensureGroupVerticesIsMutable(); |
| groupVertices_.addAll(other.groupVertices_); |
| } |
| onChanged(); |
| } |
| if (other.hasMergedInputDescriptor()) { |
| mergeMergedInputDescriptor(other.getMergedInputDescriptor()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional string group_name = 1; |
| private java.lang.Object groupName_ = ""; |
| /** |
| * <code>optional string group_name = 1;</code> |
| */ |
| public boolean hasGroupName() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional string group_name = 1;</code> |
| */ |
| public java.lang.String getGroupName() { |
| java.lang.Object ref = groupName_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| groupName_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>optional string group_name = 1;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getGroupNameBytes() { |
| java.lang.Object ref = groupName_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| groupName_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>optional string group_name = 1;</code> |
| */ |
| public Builder setGroupName( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| groupName_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string group_name = 1;</code> |
| */ |
| public Builder clearGroupName() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| groupName_ = getDefaultInstance().getGroupName(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string group_name = 1;</code> |
| */ |
| public Builder setGroupNameBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| groupName_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // repeated string group_vertices = 2; |
| private com.google.protobuf.LazyStringList groupVertices_ = com.google.protobuf.LazyStringArrayList.EMPTY; |
| private void ensureGroupVerticesIsMutable() { |
| if (!((bitField0_ & 0x00000002) == 0x00000002)) { |
| groupVertices_ = new com.google.protobuf.LazyStringArrayList(groupVertices_); |
| bitField0_ |= 0x00000002; |
| } |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public java.util.List<java.lang.String> |
| getGroupVerticesList() { |
| return java.util.Collections.unmodifiableList(groupVertices_); |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public int getGroupVerticesCount() { |
| return groupVertices_.size(); |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public java.lang.String getGroupVertices(int index) { |
| return groupVertices_.get(index); |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getGroupVerticesBytes(int index) { |
| return groupVertices_.getByteString(index); |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public Builder setGroupVertices( |
| int index, java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| ensureGroupVerticesIsMutable(); |
| groupVertices_.set(index, value); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public Builder addGroupVertices( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| ensureGroupVerticesIsMutable(); |
| groupVertices_.add(value); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public Builder addAllGroupVertices( |
| java.lang.Iterable<java.lang.String> values) { |
| ensureGroupVerticesIsMutable(); |
| super.addAll(values, groupVertices_); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public Builder clearGroupVertices() { |
| groupVertices_ = com.google.protobuf.LazyStringArrayList.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>repeated string group_vertices = 2;</code> |
| */ |
| public Builder addGroupVerticesBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| ensureGroupVerticesIsMutable(); |
| groupVertices_.add(value); |
| onChanged(); |
| return this; |
| } |
| |
| // optional .EntityDescriptorProto merged_input_descriptor = 3; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto mergedInputDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> mergedInputDescriptorBuilder_; |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public boolean hasMergedInputDescriptor() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getMergedInputDescriptor() { |
| if (mergedInputDescriptorBuilder_ == null) { |
| return mergedInputDescriptor_; |
| } else { |
| return mergedInputDescriptorBuilder_.getMessage(); |
| } |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public Builder setMergedInputDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) { |
| if (mergedInputDescriptorBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| mergedInputDescriptor_ = value; |
| onChanged(); |
| } else { |
| mergedInputDescriptorBuilder_.setMessage(value); |
| } |
| bitField0_ |= 0x00000004; |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public Builder setMergedInputDescriptor( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder builderForValue) { |
| if (mergedInputDescriptorBuilder_ == null) { |
| mergedInputDescriptor_ = builderForValue.build(); |
| onChanged(); |
| } else { |
| mergedInputDescriptorBuilder_.setMessage(builderForValue.build()); |
| } |
| bitField0_ |= 0x00000004; |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public Builder mergeMergedInputDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) { |
| if (mergedInputDescriptorBuilder_ == null) { |
| if (((bitField0_ & 0x00000004) == 0x00000004) && |
| mergedInputDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) { |
| mergedInputDescriptor_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(mergedInputDescriptor_).mergeFrom(value).buildPartial(); |
| } else { |
| mergedInputDescriptor_ = value; |
| } |
| onChanged(); |
| } else { |
| mergedInputDescriptorBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000004; |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public Builder clearMergedInputDescriptor() { |
| if (mergedInputDescriptorBuilder_ == null) { |
| mergedInputDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance(); |
| onChanged(); |
| } else { |
| mergedInputDescriptorBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000004); |
| return this; |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder getMergedInputDescriptorBuilder() { |
| bitField0_ |= 0x00000004; |
| onChanged(); |
| return getMergedInputDescriptorFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getMergedInputDescriptorOrBuilder() { |
| if (mergedInputDescriptorBuilder_ != null) { |
| return mergedInputDescriptorBuilder_.getMessageOrBuilder(); |
| } else { |
| return mergedInputDescriptor_; |
| } |
| } |
| /** |
| * <code>optional .EntityDescriptorProto merged_input_descriptor = 3;</code> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> |
| getMergedInputDescriptorFieldBuilder() { |
| if (mergedInputDescriptorBuilder_ == null) { |
| mergedInputDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder>( |
| mergedInputDescriptor_, |
| getParentForChildren(), |
| isClean()); |
| mergedInputDescriptor_ = null; |
| } |
| return mergedInputDescriptorBuilder_; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:GroupInputSpecProto) |
| } |
| |
    // Eagerly create the shared immutable default instance at class-load
    // time; initFields() assigns the per-field default values exactly once.
    static {
      defaultInstance = new GroupInputSpecProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:GroupInputSpecProto) |
| } |
| |
  /**
   * Read-only accessor contract implemented by both {@code SignableVertexSpec}
   * and its {@code Builder}, as generated by the protocol buffer compiler.
   * For each optional field there is a has/get pair (plus a getXxxBytes
   * raw-UTF-8 variant for strings); repeated fields expose list, indexed,
   * count, and OrBuilder accessors.
   */
  public interface SignableVertexSpecOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string user = 1;
    /**
     * <code>optional string user = 1;</code>
     */
    boolean hasUser();
    /**
     * <code>optional string user = 1;</code>
     */
    java.lang.String getUser();
    /**
     * <code>optional string user = 1;</code>
     */
    com.google.protobuf.ByteString
        getUserBytes();

    // optional int64 signatureKeyId = 2;
    /**
     * <code>optional int64 signatureKeyId = 2;</code>
     */
    boolean hasSignatureKeyId();
    /**
     * <code>optional int64 signatureKeyId = 2;</code>
     */
    long getSignatureKeyId();

    // optional .QueryIdentifierProto query_identifier = 3;
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
     */
    boolean hasQueryIdentifier();
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();

    // optional string hive_query_id = 4;
    /**
     * <code>optional string hive_query_id = 4;</code>
     */
    boolean hasHiveQueryId();
    /**
     * <code>optional string hive_query_id = 4;</code>
     */
    java.lang.String getHiveQueryId();
    /**
     * <code>optional string hive_query_id = 4;</code>
     */
    com.google.protobuf.ByteString
        getHiveQueryIdBytes();

    // optional string dag_name = 5;
    /**
     * <code>optional string dag_name = 5;</code>
     *
     * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
     * </pre>
     */
    boolean hasDagName();
    /**
     * <code>optional string dag_name = 5;</code>
     *
     * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
     * </pre>
     */
    java.lang.String getDagName();
    /**
     * <code>optional string dag_name = 5;</code>
     *
     * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
     * </pre>
     */
    com.google.protobuf.ByteString
        getDagNameBytes();

    // optional string vertex_name = 6;
    /**
     * <code>optional string vertex_name = 6;</code>
     */
    boolean hasVertexName();
    /**
     * <code>optional string vertex_name = 6;</code>
     */
    java.lang.String getVertexName();
    /**
     * <code>optional string vertex_name = 6;</code>
     */
    com.google.protobuf.ByteString
        getVertexNameBytes();

    // optional int32 vertex_index = 7;
    /**
     * <code>optional int32 vertex_index = 7;</code>
     */
    boolean hasVertexIndex();
    /**
     * <code>optional int32 vertex_index = 7;</code>
     */
    int getVertexIndex();

    // optional string token_identifier = 8;
    /**
     * <code>optional string token_identifier = 8;</code>
     *
     * <pre>
     * The core vertex stuff 
     * </pre>
     */
    boolean hasTokenIdentifier();
    /**
     * <code>optional string token_identifier = 8;</code>
     *
     * <pre>
     * The core vertex stuff 
     * </pre>
     */
    java.lang.String getTokenIdentifier();
    /**
     * <code>optional string token_identifier = 8;</code>
     *
     * <pre>
     * The core vertex stuff 
     * </pre>
     */
    com.google.protobuf.ByteString
        getTokenIdentifierBytes();

    // optional .EntityDescriptorProto processor_descriptor = 9;
    /**
     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
     */
    boolean hasProcessorDescriptor();
    /**
     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor();
    /**
     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder();

    // repeated .IOSpecProto input_specs = 10;
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> 
        getInputSpecsList();
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index);
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    int getInputSpecsCount();
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
        getInputSpecsOrBuilderList();
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
        int index);

    // repeated .IOSpecProto output_specs = 11;
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> 
        getOutputSpecsList();
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index);
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    int getOutputSpecsCount();
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> 
        getOutputSpecsOrBuilderList();
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
        int index);

    // repeated .GroupInputSpecProto grouped_input_specs = 12;
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> 
        getGroupedInputSpecsList();
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index);
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    int getGroupedInputSpecsCount();
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> 
        getGroupedInputSpecsOrBuilderList();
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
        int index);

    // optional int32 vertex_parallelism = 13;
    /**
     * <code>optional int32 vertex_parallelism = 13;</code>
     *
     * <pre>
     * An internal field required for Tez.
     * </pre>
     */
    boolean hasVertexParallelism();
    /**
     * <code>optional int32 vertex_parallelism = 13;</code>
     *
     * <pre>
     * An internal field required for Tez.
     * </pre>
     */
    int getVertexParallelism();
  }
| /** |
| * Protobuf type {@code SignableVertexSpec} |
| * |
| * <pre> |
| * The part of SubmitWork that can be signed |
| * </pre> |
| */ |
| public static final class SignableVertexSpec extends |
| com.google.protobuf.GeneratedMessage |
| implements SignableVertexSpecOrBuilder { |
    // Use SignableVertexSpec.newBuilder() to construct.
    private SignableVertexSpec(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor: used only to create the shared default instance,
    // whose fields are populated by the static initializer via initFields().
    private SignableVertexSpec(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable singleton representing "all fields default".
    private static final SignableVertexSpec defaultInstance;
    public static SignableVertexSpec getDefaultInstance() {
      return defaultInstance;
    }

    public SignableVertexSpec getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not in this schema version are
    // preserved here verbatim for forward compatibility.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Stream-parsing constructor used by PARSER: decodes one SignableVertexSpec
    // from the protobuf wire format. Each tag is (field_number << 3) | wire_type,
    // hence case 10 = field 1 length-delimited, 16 = field 2 varint, etc.
    // NOTE: mutable_bitField0_ tracks which repeated-field lists have been
    // allocated during this parse and is a separate bit-space from bitField0_
    // (the presence bits), so 0x00000200 appearing in both below is intentional,
    // not a collision.
    private SignableVertexSpec(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 signals end of stream.
              done = true;
              break;
            default: {
              // Unrecognized tag: stash the field in unknownFields; a false
              // return means end-of-group, which also terminates the loop.
              // (Placing 'default' before the remaining cases is legal Java —
              // switch dispatch is by value, not by textual order.)
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // optional string user = 1 (kept as ByteString, decoded lazily)
              bitField0_ |= 0x00000001;
              user_ = input.readBytes();
              break;
            }
            case 16: {
              // optional int64 signatureKeyId = 2
              bitField0_ |= 0x00000002;
              signatureKeyId_ = input.readInt64();
              break;
            }
            case 26: {
              // optional .QueryIdentifierProto query_identifier = 3; if the
              // field was already seen, merge the new payload into it.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000004) == 0x00000004)) {
                subBuilder = queryIdentifier_.toBuilder();
              }
              queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(queryIdentifier_);
                queryIdentifier_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000004;
              break;
            }
            case 34: {
              // optional string hive_query_id = 4
              bitField0_ |= 0x00000008;
              hiveQueryId_ = input.readBytes();
              break;
            }
            case 42: {
              // optional string dag_name = 5
              bitField0_ |= 0x00000010;
              dagName_ = input.readBytes();
              break;
            }
            case 50: {
              // optional string vertex_name = 6
              bitField0_ |= 0x00000020;
              vertexName_ = input.readBytes();
              break;
            }
            case 56: {
              // optional int32 vertex_index = 7
              bitField0_ |= 0x00000040;
              vertexIndex_ = input.readInt32();
              break;
            }
            case 66: {
              // optional string token_identifier = 8
              bitField0_ |= 0x00000080;
              tokenIdentifier_ = input.readBytes();
              break;
            }
            case 74: {
              // optional .EntityDescriptorProto processor_descriptor = 9;
              // merged with any previously-seen value, like field 3 above.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000100) == 0x00000100)) {
                subBuilder = processorDescriptor_.toBuilder();
              }
              processorDescriptor_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(processorDescriptor_);
                processorDescriptor_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000100;
              break;
            }
            case 82: {
              // repeated .IOSpecProto input_specs = 10; list allocated lazily
              // on first element (tracked in mutable_bitField0_).
              if (!((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
                inputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>();
                mutable_bitField0_ |= 0x00000200;
              }
              inputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.PARSER, extensionRegistry));
              break;
            }
            case 90: {
              // repeated .IOSpecProto output_specs = 11
              if (!((mutable_bitField0_ & 0x00000400) == 0x00000400)) {
                outputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>();
                mutable_bitField0_ |= 0x00000400;
              }
              outputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.PARSER, extensionRegistry));
              break;
            }
            case 98: {
              // repeated .GroupInputSpecProto grouped_input_specs = 12
              if (!((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
                groupedInputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>();
                mutable_bitField0_ |= 0x00000800;
              }
              groupedInputSpecs_.add(input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.PARSER, extensionRegistry));
              break;
            }
            case 104: {
              // optional int32 vertex_parallelism = 13 (presence bit 0x200 in
              // bitField0_ — unrelated to the 0x200 in mutable_bitField0_).
              bitField0_ |= 0x00000200;
              vertexParallelism_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs on both success and failure: freeze any repeated lists that
        // were allocated and finalize the unknown-field set so the (possibly
        // partial) message attached to the exception is still immutable.
        if (((mutable_bitField0_ & 0x00000200) == 0x00000200)) {
          inputSpecs_ = java.util.Collections.unmodifiableList(inputSpecs_);
        }
        if (((mutable_bitField0_ & 0x00000400) == 0x00000400)) {
          outputSpecs_ = java.util.Collections.unmodifiableList(outputSpecs_);
        }
        if (((mutable_bitField0_ & 0x00000800) == 0x00000800)) {
          groupedInputSpecs_ = java.util.Collections.unmodifiableList(groupedInputSpecs_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the protobuf descriptor for the SignableVertexSpec message type,
    // looked up from the file-level descriptor held by the outer class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SignableVertexSpec_descriptor;
    }
| |
    // Reflection support: binds the generated field-accessor table to this
    // message class and its Builder so GeneratedMessage can reflect over fields.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SignableVertexSpec_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder.class);
    }
| |
    // Stream parser backing every parseFrom()/parseDelimitedFrom() overload;
    // delegates to the byte-stream parsing constructor of SignableVertexSpec.
    public static com.google.protobuf.Parser<SignableVertexSpec> PARSER =
        new com.google.protobuf.AbstractParser<SignableVertexSpec>() {
      public SignableVertexSpec parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SignableVertexSpec(input, extensionRegistry);
      }
    };
| |
    // Exposes the shared static PARSER instance to the protobuf runtime.
    @java.lang.Override
    public com.google.protobuf.Parser<SignableVertexSpec> getParserForType() {
      return PARSER;
    }
| |
    // Bitmask recording which optional fields were explicitly set
    // (one bit per optional field, assigned in field order: user = 0x1, ... ,
    // vertex_parallelism = 0x200).
    private int bitField0_;
    // optional string user = 1;
    public static final int USER_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString; the accessors below
    // convert lazily and cache the converted form.
    private java.lang.Object user_;
    /**
     * <code>optional string user = 1;</code>
     */
    public boolean hasUser() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string user = 1;</code>
     */
    public java.lang.String getUser() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8,
        // so an invalid payload keeps round-tripping as raw bytes.
        if (bs.isValidUtf8()) {
          user_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string user = 1;</code>
     */
    public com.google.protobuf.ByteString
        getUserBytes() {
      java.lang.Object ref = user_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        user_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional int64 signatureKeyId = 2;
    public static final int SIGNATUREKEYID_FIELD_NUMBER = 2;
    // Presence tracked via bitField0_ bit 0x00000002.
    private long signatureKeyId_;
    /**
     * <code>optional int64 signatureKeyId = 2;</code>
     */
    public boolean hasSignatureKeyId() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int64 signatureKeyId = 2;</code>
     */
    public long getSignatureKeyId() {
      return signatureKeyId_;
    }
| |
    // optional .QueryIdentifierProto query_identifier = 3;
    public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 3;
    // Message-typed field; initFields() sets it to the default instance, so it
    // is non-null even when hasQueryIdentifier() is false.
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
     */
    public boolean hasQueryIdentifier() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
      return queryIdentifier_;
    }
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
      return queryIdentifier_;
    }
| |
    // optional string hive_query_id = 4;
    public static final int HIVE_QUERY_ID_FIELD_NUMBER = 4;
    // Holds either a String or a ByteString; converted and cached lazily by the
    // accessors (same pattern as the user field above).
    private java.lang.Object hiveQueryId_;
    /**
     * <code>optional string hive_query_id = 4;</code>
     */
    public boolean hasHiveQueryId() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional string hive_query_id = 4;</code>
     */
    public java.lang.String getHiveQueryId() {
      java.lang.Object ref = hiveQueryId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the String form when the bytes are valid UTF-8.
        if (bs.isValidUtf8()) {
          hiveQueryId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string hive_query_id = 4;</code>
     */
    public com.google.protobuf.ByteString
        getHiveQueryIdBytes() {
      java.lang.Object ref = hiveQueryId_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        hiveQueryId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional string dag_name = 5;
    public static final int DAG_NAME_FIELD_NUMBER = 5;
    // String-or-ByteString storage, converted lazily (see user field above).
    private java.lang.Object dagName_;
    /**
     * <code>optional string dag_name = 5;</code>
     *
     * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
     * </pre>
     */
    public boolean hasDagName() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string dag_name = 5;</code>
     *
     * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
     * </pre>
     */
    public java.lang.String getDagName() {
      java.lang.Object ref = dagName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the String form when the bytes are valid UTF-8.
        if (bs.isValidUtf8()) {
          dagName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string dag_name = 5;</code>
     *
     * <pre>
     * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
     * </pre>
     */
    public com.google.protobuf.ByteString
        getDagNameBytes() {
      java.lang.Object ref = dagName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        dagName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional string vertex_name = 6;
    public static final int VERTEX_NAME_FIELD_NUMBER = 6;
    // String-or-ByteString storage, converted lazily (see user field above).
    private java.lang.Object vertexName_;
    /**
     * <code>optional string vertex_name = 6;</code>
     */
    public boolean hasVertexName() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional string vertex_name = 6;</code>
     */
    public java.lang.String getVertexName() {
      java.lang.Object ref = vertexName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the String form when the bytes are valid UTF-8.
        if (bs.isValidUtf8()) {
          vertexName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string vertex_name = 6;</code>
     */
    public com.google.protobuf.ByteString
        getVertexNameBytes() {
      java.lang.Object ref = vertexName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        vertexName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional int32 vertex_index = 7;
    public static final int VERTEX_INDEX_FIELD_NUMBER = 7;
    // Presence tracked via bitField0_ bit 0x00000040.
    private int vertexIndex_;
    /**
     * <code>optional int32 vertex_index = 7;</code>
     */
    public boolean hasVertexIndex() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional int32 vertex_index = 7;</code>
     */
    public int getVertexIndex() {
      return vertexIndex_;
    }
| |
    // optional string token_identifier = 8;
    public static final int TOKEN_IDENTIFIER_FIELD_NUMBER = 8;
    // String-or-ByteString storage, converted lazily (see user field above).
    private java.lang.Object tokenIdentifier_;
    /**
     * <code>optional string token_identifier = 8;</code>
     *
     * <pre>
     * The core vertex stuff
     * </pre>
     */
    public boolean hasTokenIdentifier() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional string token_identifier = 8;</code>
     *
     * <pre>
     * The core vertex stuff
     * </pre>
     */
    public java.lang.String getTokenIdentifier() {
      java.lang.Object ref = tokenIdentifier_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the String form when the bytes are valid UTF-8.
        if (bs.isValidUtf8()) {
          tokenIdentifier_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string token_identifier = 8;</code>
     *
     * <pre>
     * The core vertex stuff
     * </pre>
     */
    public com.google.protobuf.ByteString
        getTokenIdentifierBytes() {
      java.lang.Object ref = tokenIdentifier_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        tokenIdentifier_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional .EntityDescriptorProto processor_descriptor = 9;
    public static final int PROCESSOR_DESCRIPTOR_FIELD_NUMBER = 9;
    // Message-typed field; initFields() sets it to the default instance, so it
    // is non-null even when hasProcessorDescriptor() is false.
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto processorDescriptor_;
    /**
     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
     */
    public boolean hasProcessorDescriptor() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor() {
      return processorDescriptor_;
    }
    /**
     * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder() {
      return processorDescriptor_;
    }
| |
    // repeated .IOSpecProto input_specs = 10;
    public static final int INPUT_SPECS_FIELD_NUMBER = 10;
    // Repeated field: defaults to an empty list and is made unmodifiable by the
    // parsing constructor, so the getters can expose it directly.
    private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> inputSpecs_;
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getInputSpecsList() {
      return inputSpecs_;
    }
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
        getInputSpecsOrBuilderList() {
      return inputSpecs_;
    }
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    public int getInputSpecsCount() {
      return inputSpecs_.size();
    }
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index) {
      return inputSpecs_.get(index);
    }
    /**
     * <code>repeated .IOSpecProto input_specs = 10;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
        int index) {
      return inputSpecs_.get(index);
    }
| |
    // repeated .IOSpecProto output_specs = 11;
    public static final int OUTPUT_SPECS_FIELD_NUMBER = 11;
    // Repeated field: defaults to an empty list and is made unmodifiable by the
    // parsing constructor (same pattern as inputSpecs_).
    private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> outputSpecs_;
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getOutputSpecsList() {
      return outputSpecs_;
    }
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
        getOutputSpecsOrBuilderList() {
      return outputSpecs_;
    }
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    public int getOutputSpecsCount() {
      return outputSpecs_.size();
    }
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index) {
      return outputSpecs_.get(index);
    }
    /**
     * <code>repeated .IOSpecProto output_specs = 11;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
        int index) {
      return outputSpecs_.get(index);
    }
| |
    // repeated .GroupInputSpecProto grouped_input_specs = 12;
    public static final int GROUPED_INPUT_SPECS_FIELD_NUMBER = 12;
    // Repeated field: defaults to an empty list and is made unmodifiable by the
    // parsing constructor (same pattern as inputSpecs_).
    private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> groupedInputSpecs_;
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> getGroupedInputSpecsList() {
      return groupedInputSpecs_;
    }
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder>
        getGroupedInputSpecsOrBuilderList() {
      return groupedInputSpecs_;
    }
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    public int getGroupedInputSpecsCount() {
      return groupedInputSpecs_.size();
    }
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index) {
      return groupedInputSpecs_.get(index);
    }
    /**
     * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
        int index) {
      return groupedInputSpecs_.get(index);
    }
| |
    // optional int32 vertex_parallelism = 13;
    public static final int VERTEX_PARALLELISM_FIELD_NUMBER = 13;
    // Presence tracked via bitField0_ bit 0x00000200 (repeated fields do not
    // consume presence bits, so this follows bit 0x00000100 of field 9).
    private int vertexParallelism_;
    /**
     * <code>optional int32 vertex_parallelism = 13;</code>
     *
     * <pre>
     * An internal field required for Tez.
     * </pre>
     */
    public boolean hasVertexParallelism() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * <code>optional int32 vertex_parallelism = 13;</code>
     *
     * <pre>
     * An internal field required for Tez.
     * </pre>
     */
    public int getVertexParallelism() {
      return vertexParallelism_;
    }
| |
    // Sets every field to its proto default: empty strings, zero numerics,
    // default instances for message fields, and empty immutable lists for
    // repeated fields. Invoked by the constructors before parsing.
    private void initFields() {
      user_ = "";
      signatureKeyId_ = 0L;
      queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
      hiveQueryId_ = "";
      dagName_ = "";
      vertexName_ = "";
      vertexIndex_ = 0;
      tokenIdentifier_ = "";
      processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
      inputSpecs_ = java.util.Collections.emptyList();
      outputSpecs_ = java.util.Collections.emptyList();
      groupedInputSpecs_ = java.util.Collections.emptyList();
      vertexParallelism_ = 0;
    }
    // Memoized initialization state: -1 = unknown, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    // SignableVertexSpec has no required fields, so this always returns true
    // (after caching the result on first call).
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes all set fields to |output| in ascending field-number order.
    // getSerializedSize() is called first for its side effect of populating
    // memoizedSerializedSize before any nested message is written.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getUserBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt64(2, signatureKeyId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeMessage(3, queryIdentifier_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBytes(4, getHiveQueryIdBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getDagNameBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(6, getVertexNameBytes());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeInt32(7, vertexIndex_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeBytes(8, getTokenIdentifierBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeMessage(9, processorDescriptor_);
      }
      // Repeated fields are written unconditionally; an empty list emits nothing.
      for (int i = 0; i < inputSpecs_.size(); i++) {
        output.writeMessage(10, inputSpecs_.get(i));
      }
      for (int i = 0; i < outputSpecs_.size(); i++) {
        output.writeMessage(11, outputSpecs_.get(i));
      }
      for (int i = 0; i < groupedInputSpecs_.size(); i++) {
        output.writeMessage(12, groupedInputSpecs_.get(i));
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeInt32(13, vertexParallelism_);
      }
      getUnknownFields().writeTo(output);
    }
| |
    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    // Computes (once) and memoizes the serialized byte size of this message,
    // mirroring the field order used by writeTo().
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getUserBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(2, signatureKeyId_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, queryIdentifier_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(4, getHiveQueryIdBytes());
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getDagNameBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, getVertexNameBytes());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(7, vertexIndex_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(8, getTokenIdentifierBytes());
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(9, processorDescriptor_);
      }
      for (int i = 0; i < inputSpecs_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(10, inputSpecs_.get(i));
      }
      for (int i = 0; i < outputSpecs_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(11, outputSpecs_.get(i));
      }
      for (int i = 0; i < groupedInputSpecs_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(12, groupedInputSpecs_.get(i));
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(13, vertexParallelism_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's writeReplace.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Structural equality: presence bits must match before values are compared,
    // repeated fields compare as whole lists, and unknown fields must also match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec) obj;

      boolean result = true;
      result = result && (hasUser() == other.hasUser());
      if (hasUser()) {
        result = result && getUser()
            .equals(other.getUser());
      }
      result = result && (hasSignatureKeyId() == other.hasSignatureKeyId());
      if (hasSignatureKeyId()) {
        result = result && (getSignatureKeyId()
            == other.getSignatureKeyId());
      }
      result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
      if (hasQueryIdentifier()) {
        result = result && getQueryIdentifier()
            .equals(other.getQueryIdentifier());
      }
      result = result && (hasHiveQueryId() == other.hasHiveQueryId());
      if (hasHiveQueryId()) {
        result = result && getHiveQueryId()
            .equals(other.getHiveQueryId());
      }
      result = result && (hasDagName() == other.hasDagName());
      if (hasDagName()) {
        result = result && getDagName()
            .equals(other.getDagName());
      }
      result = result && (hasVertexName() == other.hasVertexName());
      if (hasVertexName()) {
        result = result && getVertexName()
            .equals(other.getVertexName());
      }
      result = result && (hasVertexIndex() == other.hasVertexIndex());
      if (hasVertexIndex()) {
        result = result && (getVertexIndex()
            == other.getVertexIndex());
      }
      result = result && (hasTokenIdentifier() == other.hasTokenIdentifier());
      if (hasTokenIdentifier()) {
        result = result && getTokenIdentifier()
            .equals(other.getTokenIdentifier());
      }
      result = result && (hasProcessorDescriptor() == other.hasProcessorDescriptor());
      if (hasProcessorDescriptor()) {
        result = result && getProcessorDescriptor()
            .equals(other.getProcessorDescriptor());
      }
      result = result && getInputSpecsList()
          .equals(other.getInputSpecsList());
      result = result && getOutputSpecsList()
          .equals(other.getOutputSpecsList());
      result = result && getGroupedInputSpecsList()
          .equals(other.getGroupedInputSpecsList());
      result = result && (hasVertexParallelism() == other.hasVertexParallelism());
      if (hasVertexParallelism()) {
        result = result && (getVertexParallelism()
            == other.getVertexParallelism());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
| |
    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    // Hash built from the descriptor, each set field (keyed by its field
    // number), and the unknown fields; memoized on first call. hashLong is
    // presumably a generated helper defined elsewhere in this file.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasUser()) {
        hash = (37 * hash) + USER_FIELD_NUMBER;
        hash = (53 * hash) + getUser().hashCode();
      }
      if (hasSignatureKeyId()) {
        hash = (37 * hash) + SIGNATUREKEYID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getSignatureKeyId());
      }
      if (hasQueryIdentifier()) {
        hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getQueryIdentifier().hashCode();
      }
      if (hasHiveQueryId()) {
        hash = (37 * hash) + HIVE_QUERY_ID_FIELD_NUMBER;
        hash = (53 * hash) + getHiveQueryId().hashCode();
      }
      if (hasDagName()) {
        hash = (37 * hash) + DAG_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getDagName().hashCode();
      }
      if (hasVertexName()) {
        hash = (37 * hash) + VERTEX_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getVertexName().hashCode();
      }
      if (hasVertexIndex()) {
        hash = (37 * hash) + VERTEX_INDEX_FIELD_NUMBER;
        hash = (53 * hash) + getVertexIndex();
      }
      if (hasTokenIdentifier()) {
        hash = (37 * hash) + TOKEN_IDENTIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getTokenIdentifier().hashCode();
      }
      if (hasProcessorDescriptor()) {
        hash = (37 * hash) + PROCESSOR_DESCRIPTOR_FIELD_NUMBER;
        hash = (53 * hash) + getProcessorDescriptor().hashCode();
      }
      if (getInputSpecsCount() > 0) {
        hash = (37 * hash) + INPUT_SPECS_FIELD_NUMBER;
        hash = (53 * hash) + getInputSpecsList().hashCode();
      }
      if (getOutputSpecsCount() > 0) {
        hash = (37 * hash) + OUTPUT_SPECS_FIELD_NUMBER;
        hash = (53 * hash) + getOutputSpecsList().hashCode();
      }
      if (getGroupedInputSpecsCount() > 0) {
        hash = (37 * hash) + GROUPED_INPUT_SPECS_FIELD_NUMBER;
        hash = (53 * hash) + getGroupedInputSpecsList().hashCode();
      }
      if (hasVertexParallelism()) {
        hash = (37 * hash) + VERTEX_PARALLELISM_FIELD_NUMBER;
        hash = (53 * hash) + getVertexParallelism();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // parseFrom()/parseDelimitedFrom() overloads: thin wrappers that delegate
    // to the static PARSER for each supported input representation
    // (ByteString, byte[], InputStream, CodedInputStream), with and without
    // an extension registry.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Builder factory methods: fresh builder, builder pre-populated from a
    // prototype message, and the internal parent-aware variant used by the
    // protobuf runtime for nested builders.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code SignableVertexSpec} |
| * |
| * <pre> |
| * The part of SubmitWork that can be signed |
| * </pre> |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder { |
      // Returns the message descriptor shared with the SignableVertexSpec class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SignableVertexSpec_descriptor;
      }

      // Reflection support: binds the accessor table to the message/builder pair.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SignableVertexSpec_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder.class);
      }
| |
      // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-aware constructor used by the runtime for nested builders.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the sub-builders for message-typed and repeated fields
      // when the runtime requests it via alwaysUseFieldBuilders.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getQueryIdentifierFieldBuilder();
          getProcessorDescriptorFieldBuilder();
          getInputSpecsFieldBuilder();
          getOutputSpecsFieldBuilder();
          getGroupedInputSpecsFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
| |
      // Resets every field to its proto default and clears all presence bits.
      // Note the Builder uses its own bit numbering (13 fields -> bits
      // 0x1..0x1000), distinct from the message's bitField0_ layout where
      // repeated fields consume no presence bits.
      public Builder clear() {
        super.clear();
        user_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        signatureKeyId_ = 0L;
        bitField0_ = (bitField0_ & ~0x00000002);
        if (queryIdentifierBuilder_ == null) {
          queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
        } else {
          queryIdentifierBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        hiveQueryId_ = "";
        bitField0_ = (bitField0_ & ~0x00000008);
        dagName_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        vertexName_ = "";
        bitField0_ = (bitField0_ & ~0x00000020);
        vertexIndex_ = 0;
        bitField0_ = (bitField0_ & ~0x00000040);
        tokenIdentifier_ = "";
        bitField0_ = (bitField0_ & ~0x00000080);
        if (processorDescriptorBuilder_ == null) {
          processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
        } else {
          processorDescriptorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        // Repeated fields: reset the plain list only when no sub-builder owns
        // the data; otherwise delegate clearing to the sub-builder.
        if (inputSpecsBuilder_ == null) {
          inputSpecs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000200);
        } else {
          inputSpecsBuilder_.clear();
        }
        if (outputSpecsBuilder_ == null) {
          outputSpecs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000400);
        } else {
          outputSpecsBuilder_.clear();
        }
        if (groupedInputSpecsBuilder_ == null) {
          groupedInputSpecs_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000800);
        } else {
          groupedInputSpecsBuilder_.clear();
        }
        vertexParallelism_ = 0;
        bitField0_ = (bitField0_ & ~0x00001000);
        return this;
      }
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SignableVertexSpec_descriptor;
      }

      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec getDefaultInstanceForType() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance();
      }

      // Builds the message, rejecting it if required invariants are not met
      // (isInitialized() is always true here since all fields are optional,
      // so the exception path is effectively dead for this message type).
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec build() {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
| |
      // Copies the builder state into a new message without checking
      // initialization. Builder has-bits (from_bitField0_) are remapped to the
      // message's has-bits (to_bitField0_): repeated fields occupy builder
      // bits but no message bits, which is why vertexParallelism moves from
      // builder bit 0x1000 down to message bit 0x200. Repeated-field lists are
      // frozen via unmodifiableList and handed to the message (ownership
      // transfer), after which the builder's mutable bit is cleared.
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec buildPartial() {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.user_ = user_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.signatureKeyId_ = signatureKeyId_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        if (queryIdentifierBuilder_ == null) {
          result.queryIdentifier_ = queryIdentifier_;
        } else {
          result.queryIdentifier_ = queryIdentifierBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.hiveQueryId_ = hiveQueryId_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.dagName_ = dagName_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.vertexName_ = vertexName_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.vertexIndex_ = vertexIndex_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.tokenIdentifier_ = tokenIdentifier_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        if (processorDescriptorBuilder_ == null) {
          result.processorDescriptor_ = processorDescriptor_;
        } else {
          result.processorDescriptor_ = processorDescriptorBuilder_.build();
        }
        if (inputSpecsBuilder_ == null) {
          if (((bitField0_ & 0x00000200) == 0x00000200)) {
            inputSpecs_ = java.util.Collections.unmodifiableList(inputSpecs_);
            bitField0_ = (bitField0_ & ~0x00000200);
          }
          result.inputSpecs_ = inputSpecs_;
        } else {
          result.inputSpecs_ = inputSpecsBuilder_.build();
        }
        if (outputSpecsBuilder_ == null) {
          if (((bitField0_ & 0x00000400) == 0x00000400)) {
            outputSpecs_ = java.util.Collections.unmodifiableList(outputSpecs_);
            bitField0_ = (bitField0_ & ~0x00000400);
          }
          result.outputSpecs_ = outputSpecs_;
        } else {
          result.outputSpecs_ = outputSpecsBuilder_.build();
        }
        if (groupedInputSpecsBuilder_ == null) {
          if (((bitField0_ & 0x00000800) == 0x00000800)) {
            groupedInputSpecs_ = java.util.Collections.unmodifiableList(groupedInputSpecs_);
            bitField0_ = (bitField0_ & ~0x00000800);
          }
          result.groupedInputSpecs_ = groupedInputSpecs_;
        } else {
          result.groupedInputSpecs_ = groupedInputSpecsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00001000) == 0x00001000)) {
          to_bitField0_ |= 0x00000200;  // builder bit 0x1000 -> message bit 0x200
        }
        result.vertexParallelism_ = vertexParallelism_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
      // Field-by-field merge from another SignableVertexSpec: set fields in
      // `other` overwrite scalars/strings, message fields are recursively
      // merged, and repeated fields are concatenated. For repeated fields the
      // generated code takes the other message's immutable list by reference
      // when this builder's list is still empty (clearing the mutable bit so
      // a later write triggers a copy), and otherwise copies elements in.
      public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec other) {
        if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance()) return this;
        if (other.hasUser()) {
          bitField0_ |= 0x00000001;
          user_ = other.user_;
          onChanged();
        }
        if (other.hasSignatureKeyId()) {
          setSignatureKeyId(other.getSignatureKeyId());
        }
        if (other.hasQueryIdentifier()) {
          mergeQueryIdentifier(other.getQueryIdentifier());
        }
        if (other.hasHiveQueryId()) {
          bitField0_ |= 0x00000008;
          hiveQueryId_ = other.hiveQueryId_;
          onChanged();
        }
        if (other.hasDagName()) {
          bitField0_ |= 0x00000010;
          dagName_ = other.dagName_;
          onChanged();
        }
        if (other.hasVertexName()) {
          bitField0_ |= 0x00000020;
          vertexName_ = other.vertexName_;
          onChanged();
        }
        if (other.hasVertexIndex()) {
          setVertexIndex(other.getVertexIndex());
        }
        if (other.hasTokenIdentifier()) {
          bitField0_ |= 0x00000080;
          tokenIdentifier_ = other.tokenIdentifier_;
          onChanged();
        }
        if (other.hasProcessorDescriptor()) {
          mergeProcessorDescriptor(other.getProcessorDescriptor());
        }
        if (inputSpecsBuilder_ == null) {
          if (!other.inputSpecs_.isEmpty()) {
            if (inputSpecs_.isEmpty()) {
              inputSpecs_ = other.inputSpecs_;
              bitField0_ = (bitField0_ & ~0x00000200);
            } else {
              ensureInputSpecsIsMutable();
              inputSpecs_.addAll(other.inputSpecs_);
            }
            onChanged();
          }
        } else {
          if (!other.inputSpecs_.isEmpty()) {
            if (inputSpecsBuilder_.isEmpty()) {
              // Builder holds nothing: drop it, adopt other's list directly,
              // and re-create the builder only in alwaysUseFieldBuilders mode.
              inputSpecsBuilder_.dispose();
              inputSpecsBuilder_ = null;
              inputSpecs_ = other.inputSpecs_;
              bitField0_ = (bitField0_ & ~0x00000200);
              inputSpecsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getInputSpecsFieldBuilder() : null;
            } else {
              inputSpecsBuilder_.addAllMessages(other.inputSpecs_);
            }
          }
        }
        if (outputSpecsBuilder_ == null) {
          if (!other.outputSpecs_.isEmpty()) {
            if (outputSpecs_.isEmpty()) {
              outputSpecs_ = other.outputSpecs_;
              bitField0_ = (bitField0_ & ~0x00000400);
            } else {
              ensureOutputSpecsIsMutable();
              outputSpecs_.addAll(other.outputSpecs_);
            }
            onChanged();
          }
        } else {
          if (!other.outputSpecs_.isEmpty()) {
            if (outputSpecsBuilder_.isEmpty()) {
              outputSpecsBuilder_.dispose();
              outputSpecsBuilder_ = null;
              outputSpecs_ = other.outputSpecs_;
              bitField0_ = (bitField0_ & ~0x00000400);
              outputSpecsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getOutputSpecsFieldBuilder() : null;
            } else {
              outputSpecsBuilder_.addAllMessages(other.outputSpecs_);
            }
          }
        }
        if (groupedInputSpecsBuilder_ == null) {
          if (!other.groupedInputSpecs_.isEmpty()) {
            if (groupedInputSpecs_.isEmpty()) {
              groupedInputSpecs_ = other.groupedInputSpecs_;
              bitField0_ = (bitField0_ & ~0x00000800);
            } else {
              ensureGroupedInputSpecsIsMutable();
              groupedInputSpecs_.addAll(other.groupedInputSpecs_);
            }
            onChanged();
          }
        } else {
          if (!other.groupedInputSpecs_.isEmpty()) {
            if (groupedInputSpecsBuilder_.isEmpty()) {
              groupedInputSpecsBuilder_.dispose();
              groupedInputSpecsBuilder_ = null;
              groupedInputSpecs_ = other.groupedInputSpecs_;
              bitField0_ = (bitField0_ & ~0x00000800);
              groupedInputSpecsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getGroupedInputSpecsFieldBuilder() : null;
            } else {
              groupedInputSpecsBuilder_.addAllMessages(other.groupedInputSpecs_);
            }
          }
        }
        if (other.hasVertexParallelism()) {
          setVertexParallelism(other.getVertexParallelism());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
| |
      // Always true: every field in SignableVertexSpec is optional, so there
      // are no required-field invariants to check.
      public final boolean isInitialized() {
        return true;
      }

      // Parses from a wire stream via PARSER. On a parse error the partially
      // parsed message (if any) is still merged in via the finally block
      // before the exception propagates, preserving whatever was read.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for fields 1..13; see the 0x... masks used per field below.
      private int bitField0_;
| |
      // optional string user = 1;
      // Stored as Object: holds either a String or a ByteString, lazily
      // converted and cached in whichever representation was last requested.
      private java.lang.Object user_ = "";
      /**
       * <code>optional string user = 1;</code>
       */
      public boolean hasUser() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional string user = 1;</code>
       */
      public java.lang.String getUser() {
        java.lang.Object ref = user_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString once and memoize the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          user_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string user = 1;</code>
       */
      public com.google.protobuf.ByteString
          getUserBytes() {
        java.lang.Object ref = user_;
        if (ref instanceof String) {
          // Encode the cached String once and memoize the ByteString.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          user_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string user = 1;</code>
       */
      public Builder setUser(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        user_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 1;</code>
       */
      public Builder clearUser() {
        bitField0_ = (bitField0_ & ~0x00000001);
        user_ = getDefaultInstance().getUser();
        onChanged();
        return this;
      }
      /**
       * <code>optional string user = 1;</code>
       */
      public Builder setUserBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        user_ = value;
        onChanged();
        return this;
      }
| |
      // optional int64 signatureKeyId = 2;
      // Identifier of the signing key; has-bit 0x00000002.
      private long signatureKeyId_ ;
      /**
       * <code>optional int64 signatureKeyId = 2;</code>
       */
      public boolean hasSignatureKeyId() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional int64 signatureKeyId = 2;</code>
       */
      public long getSignatureKeyId() {
        return signatureKeyId_;
      }
      /**
       * <code>optional int64 signatureKeyId = 2;</code>
       */
      public Builder setSignatureKeyId(long value) {
        bitField0_ |= 0x00000002;
        signatureKeyId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int64 signatureKeyId = 2;</code>
       */
      public Builder clearSignatureKeyId() {
        bitField0_ = (bitField0_ & ~0x00000002);
        signatureKeyId_ = 0L;
        onChanged();
        return this;
      }
| |
      // optional .QueryIdentifierProto query_identifier = 3;
      // Singular message field: state lives either in queryIdentifier_ (plain
      // value mode) or in queryIdentifierBuilder_ (builder mode); exactly one
      // is authoritative at any time.
      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public boolean hasQueryIdentifier() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
        if (queryIdentifierBuilder_ == null) {
          return queryIdentifier_;
        } else {
          return queryIdentifierBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
        if (queryIdentifierBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          queryIdentifier_ = value;
          onChanged();
        } else {
          queryIdentifierBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public Builder setQueryIdentifier(
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) {
        if (queryIdentifierBuilder_ == null) {
          queryIdentifier_ = builderForValue.build();
          onChanged();
        } else {
          queryIdentifierBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
        if (queryIdentifierBuilder_ == null) {
          // Merge only when a non-default value is already present;
          // otherwise just adopt the incoming value.
          if (((bitField0_ & 0x00000004) == 0x00000004) &&
              queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
            queryIdentifier_ =
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
          } else {
            queryIdentifier_ = value;
          }
          onChanged();
        } else {
          queryIdentifierBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000004;
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public Builder clearQueryIdentifier() {
        if (queryIdentifierBuilder_ == null) {
          queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
          onChanged();
        } else {
          queryIdentifierBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder getQueryIdentifierBuilder() {
        bitField0_ |= 0x00000004;
        onChanged();
        return getQueryIdentifierFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
        if (queryIdentifierBuilder_ != null) {
          return queryIdentifierBuilder_.getMessageOrBuilder();
        } else {
          return queryIdentifier_;
        }
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 3;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> 
          getQueryIdentifierFieldBuilder() {
        if (queryIdentifierBuilder_ == null) {
          // Lazily switch to builder mode; the plain field is handed to the
          // SingleFieldBuilder and nulled out to mark the transfer.
          queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
                  queryIdentifier_,
                  getParentForChildren(),
                  isClean());
          queryIdentifier_ = null;
        }
        return queryIdentifierBuilder_;
      }
| |
      // optional string hive_query_id = 4;
      // Stored as Object: String or ByteString, converted lazily and cached.
      private java.lang.Object hiveQueryId_ = "";
      /**
       * <code>optional string hive_query_id = 4;</code>
       */
      public boolean hasHiveQueryId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional string hive_query_id = 4;</code>
       */
      public java.lang.String getHiveQueryId() {
        java.lang.Object ref = hiveQueryId_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          hiveQueryId_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string hive_query_id = 4;</code>
       */
      public com.google.protobuf.ByteString
          getHiveQueryIdBytes() {
        java.lang.Object ref = hiveQueryId_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          hiveQueryId_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string hive_query_id = 4;</code>
       */
      public Builder setHiveQueryId(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        hiveQueryId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string hive_query_id = 4;</code>
       */
      public Builder clearHiveQueryId() {
        bitField0_ = (bitField0_ & ~0x00000008);
        hiveQueryId_ = getDefaultInstance().getHiveQueryId();
        onChanged();
        return this;
      }
      /**
       * <code>optional string hive_query_id = 4;</code>
       */
      public Builder setHiveQueryIdBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000008;
        hiveQueryId_ = value;
        onChanged();
        return this;
      }
| |
      // optional string dag_name = 5;
      // Stored as Object: String or ByteString, converted lazily and cached.
      private java.lang.Object dagName_ = "";
      /**
       * <code>optional string dag_name = 5;</code>
       *
       * <pre>
       * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
       * </pre>
       */
      public boolean hasDagName() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional string dag_name = 5;</code>
       *
       * <pre>
       * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
       * </pre>
       */
      public java.lang.String getDagName() {
        java.lang.Object ref = dagName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          dagName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string dag_name = 5;</code>
       *
       * <pre>
       * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
       * </pre>
       */
      public com.google.protobuf.ByteString
          getDagNameBytes() {
        java.lang.Object ref = dagName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          dagName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string dag_name = 5;</code>
       *
       * <pre>
       * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
       * </pre>
       */
      public Builder setDagName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        dagName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string dag_name = 5;</code>
       *
       * <pre>
       * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
       * </pre>
       */
      public Builder clearDagName() {
        bitField0_ = (bitField0_ & ~0x00000010);
        dagName_ = getDefaultInstance().getDagName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string dag_name = 5;</code>
       *
       * <pre>
       * Display names cannot be modified by the client for now. If needed, they should be sent to HS2 who will put them here.
       * </pre>
       */
      public Builder setDagNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        dagName_ = value;
        onChanged();
        return this;
      }
| |
      // optional string vertex_name = 6;
      // Stored as Object: String or ByteString, converted lazily and cached.
      private java.lang.Object vertexName_ = "";
      /**
       * <code>optional string vertex_name = 6;</code>
       */
      public boolean hasVertexName() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional string vertex_name = 6;</code>
       */
      public java.lang.String getVertexName() {
        java.lang.Object ref = vertexName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          vertexName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string vertex_name = 6;</code>
       */
      public com.google.protobuf.ByteString
          getVertexNameBytes() {
        java.lang.Object ref = vertexName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          vertexName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string vertex_name = 6;</code>
       */
      public Builder setVertexName(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        vertexName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string vertex_name = 6;</code>
       */
      public Builder clearVertexName() {
        bitField0_ = (bitField0_ & ~0x00000020);
        vertexName_ = getDefaultInstance().getVertexName();
        onChanged();
        return this;
      }
      /**
       * <code>optional string vertex_name = 6;</code>
       */
      public Builder setVertexNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        vertexName_ = value;
        onChanged();
        return this;
      }
| |
      // optional int32 vertex_index = 7;
      // Position of this vertex within the DAG; has-bit 0x00000040.
      private int vertexIndex_ ;
      /**
       * <code>optional int32 vertex_index = 7;</code>
       */
      public boolean hasVertexIndex() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional int32 vertex_index = 7;</code>
       */
      public int getVertexIndex() {
        return vertexIndex_;
      }
      /**
       * <code>optional int32 vertex_index = 7;</code>
       */
      public Builder setVertexIndex(int value) {
        bitField0_ |= 0x00000040;
        vertexIndex_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 vertex_index = 7;</code>
       */
      public Builder clearVertexIndex() {
        bitField0_ = (bitField0_ & ~0x00000040);
        vertexIndex_ = 0;
        onChanged();
        return this;
      }
| |
      // optional string token_identifier = 8;
      // Stored as Object: String or ByteString, converted lazily and cached.
      private java.lang.Object tokenIdentifier_ = "";
      /**
       * <code>optional string token_identifier = 8;</code>
       *
       * <pre>
       * The core vertex stuff
       * </pre>
       */
      public boolean hasTokenIdentifier() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional string token_identifier = 8;</code>
       *
       * <pre>
       * The core vertex stuff
       * </pre>
       */
      public java.lang.String getTokenIdentifier() {
        java.lang.Object ref = tokenIdentifier_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          tokenIdentifier_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string token_identifier = 8;</code>
       *
       * <pre>
       * The core vertex stuff
       * </pre>
       */
      public com.google.protobuf.ByteString
          getTokenIdentifierBytes() {
        java.lang.Object ref = tokenIdentifier_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          tokenIdentifier_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string token_identifier = 8;</code>
       *
       * <pre>
       * The core vertex stuff
       * </pre>
       */
      public Builder setTokenIdentifier(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080;
        tokenIdentifier_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string token_identifier = 8;</code>
       *
       * <pre>
       * The core vertex stuff
       * </pre>
       */
      public Builder clearTokenIdentifier() {
        bitField0_ = (bitField0_ & ~0x00000080);
        tokenIdentifier_ = getDefaultInstance().getTokenIdentifier();
        onChanged();
        return this;
      }
      /**
       * <code>optional string token_identifier = 8;</code>
       *
       * <pre>
       * The core vertex stuff
       * </pre>
       */
      public Builder setTokenIdentifierBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080;
        tokenIdentifier_ = value;
        onChanged();
        return this;
      }
| |
      // optional .EntityDescriptorProto processor_descriptor = 9;
      // Singular message field: state lives either in processorDescriptor_
      // (plain value mode) or in processorDescriptorBuilder_ (builder mode);
      // exactly one is authoritative at any time.
      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> processorDescriptorBuilder_;
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public boolean hasProcessorDescriptor() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto getProcessorDescriptor() {
        if (processorDescriptorBuilder_ == null) {
          return processorDescriptor_;
        } else {
          return processorDescriptorBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public Builder setProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
        if (processorDescriptorBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          processorDescriptor_ = value;
          onChanged();
        } else {
          processorDescriptorBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public Builder setProcessorDescriptor(
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder builderForValue) {
        if (processorDescriptorBuilder_ == null) {
          processorDescriptor_ = builderForValue.build();
          onChanged();
        } else {
          processorDescriptorBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public Builder mergeProcessorDescriptor(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto value) {
        if (processorDescriptorBuilder_ == null) {
          // Merge only when a non-default value is already present;
          // otherwise just adopt the incoming value.
          if (((bitField0_ & 0x00000100) == 0x00000100) &&
              processorDescriptor_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance()) {
            processorDescriptor_ =
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.newBuilder(processorDescriptor_).mergeFrom(value).buildPartial();
          } else {
            processorDescriptor_ = value;
          }
          onChanged();
        } else {
          processorDescriptorBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public Builder clearProcessorDescriptor() {
        if (processorDescriptorBuilder_ == null) {
          processorDescriptor_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.getDefaultInstance();
          onChanged();
        } else {
          processorDescriptorBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder getProcessorDescriptorBuilder() {
        bitField0_ |= 0x00000100;
        onChanged();
        return getProcessorDescriptorFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder getProcessorDescriptorOrBuilder() {
        if (processorDescriptorBuilder_ != null) {
          return processorDescriptorBuilder_.getMessageOrBuilder();
        } else {
          return processorDescriptor_;
        }
      }
      /**
       * <code>optional .EntityDescriptorProto processor_descriptor = 9;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder> 
          getProcessorDescriptorFieldBuilder() {
        if (processorDescriptorBuilder_ == null) {
          // Lazily switch to builder mode; the plain field is handed to the
          // SingleFieldBuilder and nulled out to mark the transfer.
          processorDescriptorBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.EntityDescriptorProtoOrBuilder>(
                  processorDescriptor_,
                  getParentForChildren(),
                  isClean());
          processorDescriptor_ = null;
        }
        return processorDescriptorBuilder_;
      }
| |
// repeated .IOSpecProto input_specs = 10;
//
// Generated repeated-field accessors. The field has two representations:
// a plain List (inputSpecs_) used until the first builder-view access, and a
// RepeatedFieldBuilder (inputSpecsBuilder_) that takes over afterwards —
// exactly one of the two is active at any time. Bit 0x00000200 of bitField0_
// records whether inputSpecs_ is a private mutable copy (vs. a possibly
// shared/immutable list).
private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> inputSpecs_ =
java.util.Collections.emptyList();
// Copy-on-write guard: replace a shared list with a private ArrayList copy
// before the first mutation, then set the "mutable" bit.
private void ensureInputSpecsIsMutable() {
if (!((bitField0_ & 0x00000200) == 0x00000200)) {
inputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>(inputSpecs_);
bitField0_ |= 0x00000200;
}
}

private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> inputSpecsBuilder_;

/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 *
 * Unmodifiable view of the current elements.
 */
public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getInputSpecsList() {
if (inputSpecsBuilder_ == null) {
return java.util.Collections.unmodifiableList(inputSpecs_);
} else {
return inputSpecsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public int getInputSpecsCount() {
if (inputSpecsBuilder_ == null) {
return inputSpecs_.size();
} else {
return inputSpecsBuilder_.getCount();
}
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getInputSpecs(int index) {
if (inputSpecsBuilder_ == null) {
return inputSpecs_.get(index);
} else {
return inputSpecsBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 *
 * Replaces the element at {@code index}; rejects null.
 */
public Builder setInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
if (inputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInputSpecsIsMutable();
inputSpecs_.set(index, value);
onChanged();
} else {
inputSpecsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public Builder setInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
if (inputSpecsBuilder_ == null) {
ensureInputSpecsIsMutable();
inputSpecs_.set(index, builderForValue.build());
onChanged();
} else {
inputSpecsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 *
 * Appends a non-null element.
 */
public Builder addInputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
if (inputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInputSpecsIsMutable();
inputSpecs_.add(value);
onChanged();
} else {
inputSpecsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public Builder addInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
if (inputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureInputSpecsIsMutable();
inputSpecs_.add(index, value);
onChanged();
} else {
inputSpecsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public Builder addInputSpecs(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
if (inputSpecsBuilder_ == null) {
ensureInputSpecsIsMutable();
inputSpecs_.add(builderForValue.build());
onChanged();
} else {
inputSpecsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public Builder addInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
if (inputSpecsBuilder_ == null) {
ensureInputSpecsIsMutable();
inputSpecs_.add(index, builderForValue.build());
onChanged();
} else {
inputSpecsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public Builder addAllInputSpecs(
java.lang.Iterable<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> values) {
if (inputSpecsBuilder_ == null) {
ensureInputSpecsIsMutable();
// Bulk-add helper inherited from the generated AbstractMessageLite.Builder.
super.addAll(values, inputSpecs_);
onChanged();
} else {
inputSpecsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 *
 * Empties the field and drops the "mutable copy" bit (0x00000200).
 */
public Builder clearInputSpecs() {
if (inputSpecsBuilder_ == null) {
inputSpecs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000200);
onChanged();
} else {
inputSpecsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public Builder removeInputSpecs(int index) {
if (inputSpecsBuilder_ == null) {
ensureInputSpecsIsMutable();
inputSpecs_.remove(index);
onChanged();
} else {
inputSpecsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 *
 * Forces creation of the RepeatedFieldBuilder and returns a mutable
 * sub-builder for the element at {@code index}.
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder getInputSpecsBuilder(
int index) {
return getInputSpecsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getInputSpecsOrBuilder(
int index) {
if (inputSpecsBuilder_ == null) {
return inputSpecs_.get(index); } else {
return inputSpecsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
getInputSpecsOrBuilderList() {
if (inputSpecsBuilder_ != null) {
return inputSpecsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(inputSpecs_);
}
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 *
 * Appends a default-valued element and returns its builder.
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addInputSpecsBuilder() {
return getInputSpecsFieldBuilder().addBuilder(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addInputSpecsBuilder(
int index) {
return getInputSpecsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
}
/**
 * <code>repeated .IOSpecProto input_specs = 10;</code>
 */
public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder>
getInputSpecsBuilderList() {
return getInputSpecsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; the current list is handed to it
// and inputSpecs_ is nulled so the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
getInputSpecsFieldBuilder() {
if (inputSpecsBuilder_ == null) {
inputSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>(
inputSpecs_,
((bitField0_ & 0x00000200) == 0x00000200),
getParentForChildren(),
isClean());
inputSpecs_ = null;
}
return inputSpecsBuilder_;
}
| |
// repeated .IOSpecProto output_specs = 11;
//
// Generated repeated-field accessors; same dual representation as the other
// repeated fields in this Builder: a plain List (outputSpecs_) until the
// first builder-view access, then a RepeatedFieldBuilder takes over. Bit
// 0x00000400 of bitField0_ marks whether outputSpecs_ is a private mutable
// copy.
private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> outputSpecs_ =
java.util.Collections.emptyList();
// Copy-on-write guard: make a private ArrayList copy before the first mutation.
private void ensureOutputSpecsIsMutable() {
if (!((bitField0_ & 0x00000400) == 0x00000400)) {
outputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto>(outputSpecs_);
bitField0_ |= 0x00000400;
}
}

private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder> outputSpecsBuilder_;

/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 *
 * Unmodifiable view of the current elements.
 */
public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> getOutputSpecsList() {
if (outputSpecsBuilder_ == null) {
return java.util.Collections.unmodifiableList(outputSpecs_);
} else {
return outputSpecsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public int getOutputSpecsCount() {
if (outputSpecsBuilder_ == null) {
return outputSpecs_.size();
} else {
return outputSpecsBuilder_.getCount();
}
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto getOutputSpecs(int index) {
if (outputSpecsBuilder_ == null) {
return outputSpecs_.get(index);
} else {
return outputSpecsBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 *
 * Replaces the element at {@code index}; rejects null.
 */
public Builder setOutputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
if (outputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputSpecsIsMutable();
outputSpecs_.set(index, value);
onChanged();
} else {
outputSpecsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public Builder setOutputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
if (outputSpecsBuilder_ == null) {
ensureOutputSpecsIsMutable();
outputSpecs_.set(index, builderForValue.build());
onChanged();
} else {
outputSpecsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 *
 * Appends a non-null element.
 */
public Builder addOutputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
if (outputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputSpecsIsMutable();
outputSpecs_.add(value);
onChanged();
} else {
outputSpecsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public Builder addOutputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto value) {
if (outputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureOutputSpecsIsMutable();
outputSpecs_.add(index, value);
onChanged();
} else {
outputSpecsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public Builder addOutputSpecs(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
if (outputSpecsBuilder_ == null) {
ensureOutputSpecsIsMutable();
outputSpecs_.add(builderForValue.build());
onChanged();
} else {
outputSpecsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public Builder addOutputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder builderForValue) {
if (outputSpecsBuilder_ == null) {
ensureOutputSpecsIsMutable();
outputSpecs_.add(index, builderForValue.build());
onChanged();
} else {
outputSpecsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public Builder addAllOutputSpecs(
java.lang.Iterable<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto> values) {
if (outputSpecsBuilder_ == null) {
ensureOutputSpecsIsMutable();
// Bulk-add helper inherited from the generated AbstractMessageLite.Builder.
super.addAll(values, outputSpecs_);
onChanged();
} else {
outputSpecsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 *
 * Empties the field and drops the "mutable copy" bit (0x00000400).
 */
public Builder clearOutputSpecs() {
if (outputSpecsBuilder_ == null) {
outputSpecs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000400);
onChanged();
} else {
outputSpecsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public Builder removeOutputSpecs(int index) {
if (outputSpecsBuilder_ == null) {
ensureOutputSpecsIsMutable();
outputSpecs_.remove(index);
onChanged();
} else {
outputSpecsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 *
 * Forces creation of the RepeatedFieldBuilder and returns a mutable
 * sub-builder for the element at {@code index}.
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder getOutputSpecsBuilder(
int index) {
return getOutputSpecsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder getOutputSpecsOrBuilder(
int index) {
if (outputSpecsBuilder_ == null) {
return outputSpecs_.get(index); } else {
return outputSpecsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
getOutputSpecsOrBuilderList() {
if (outputSpecsBuilder_ != null) {
return outputSpecsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(outputSpecs_);
}
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 *
 * Appends a default-valued element and returns its builder.
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addOutputSpecsBuilder() {
return getOutputSpecsFieldBuilder().addBuilder(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder addOutputSpecsBuilder(
int index) {
return getOutputSpecsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.getDefaultInstance());
}
/**
 * <code>repeated .IOSpecProto output_specs = 11;</code>
 */
public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder>
getOutputSpecsBuilderList() {
return getOutputSpecsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; the current list is handed to it
// and outputSpecs_ is nulled so the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>
getOutputSpecsFieldBuilder() {
if (outputSpecsBuilder_ == null) {
outputSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.IOSpecProtoOrBuilder>(
outputSpecs_,
((bitField0_ & 0x00000400) == 0x00000400),
getParentForChildren(),
isClean());
outputSpecs_ = null;
}
return outputSpecsBuilder_;
}
| |
// repeated .GroupInputSpecProto grouped_input_specs = 12;
//
// Generated repeated-field accessors; same dual representation as the other
// repeated fields in this Builder (plain List until the first builder-view
// access, RepeatedFieldBuilder afterwards). Bit 0x00000800 of bitField0_
// marks whether groupedInputSpecs_ is a private mutable copy.
private java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> groupedInputSpecs_ =
java.util.Collections.emptyList();
// Copy-on-write guard: make a private ArrayList copy before the first mutation.
private void ensureGroupedInputSpecsIsMutable() {
if (!((bitField0_ & 0x00000800) == 0x00000800)) {
groupedInputSpecs_ = new java.util.ArrayList<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto>(groupedInputSpecs_);
bitField0_ |= 0x00000800;
}
}

private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder> groupedInputSpecsBuilder_;

/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 *
 * Unmodifiable view of the current elements.
 */
public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> getGroupedInputSpecsList() {
if (groupedInputSpecsBuilder_ == null) {
return java.util.Collections.unmodifiableList(groupedInputSpecs_);
} else {
return groupedInputSpecsBuilder_.getMessageList();
}
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public int getGroupedInputSpecsCount() {
if (groupedInputSpecsBuilder_ == null) {
return groupedInputSpecs_.size();
} else {
return groupedInputSpecsBuilder_.getCount();
}
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto getGroupedInputSpecs(int index) {
if (groupedInputSpecsBuilder_ == null) {
return groupedInputSpecs_.get(index);
} else {
return groupedInputSpecsBuilder_.getMessage(index);
}
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 *
 * Replaces the element at {@code index}; rejects null.
 */
public Builder setGroupedInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto value) {
if (groupedInputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.set(index, value);
onChanged();
} else {
groupedInputSpecsBuilder_.setMessage(index, value);
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public Builder setGroupedInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder builderForValue) {
if (groupedInputSpecsBuilder_ == null) {
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.set(index, builderForValue.build());
onChanged();
} else {
groupedInputSpecsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 *
 * Appends a non-null element.
 */
public Builder addGroupedInputSpecs(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto value) {
if (groupedInputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.add(value);
onChanged();
} else {
groupedInputSpecsBuilder_.addMessage(value);
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public Builder addGroupedInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto value) {
if (groupedInputSpecsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.add(index, value);
onChanged();
} else {
groupedInputSpecsBuilder_.addMessage(index, value);
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public Builder addGroupedInputSpecs(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder builderForValue) {
if (groupedInputSpecsBuilder_ == null) {
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.add(builderForValue.build());
onChanged();
} else {
groupedInputSpecsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public Builder addGroupedInputSpecs(
int index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder builderForValue) {
if (groupedInputSpecsBuilder_ == null) {
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.add(index, builderForValue.build());
onChanged();
} else {
groupedInputSpecsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public Builder addAllGroupedInputSpecs(
java.lang.Iterable<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto> values) {
if (groupedInputSpecsBuilder_ == null) {
ensureGroupedInputSpecsIsMutable();
// Bulk-add helper inherited from the generated AbstractMessageLite.Builder.
super.addAll(values, groupedInputSpecs_);
onChanged();
} else {
groupedInputSpecsBuilder_.addAllMessages(values);
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 *
 * Empties the field and drops the "mutable copy" bit (0x00000800).
 */
public Builder clearGroupedInputSpecs() {
if (groupedInputSpecsBuilder_ == null) {
groupedInputSpecs_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000800);
onChanged();
} else {
groupedInputSpecsBuilder_.clear();
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public Builder removeGroupedInputSpecs(int index) {
if (groupedInputSpecsBuilder_ == null) {
ensureGroupedInputSpecsIsMutable();
groupedInputSpecs_.remove(index);
onChanged();
} else {
groupedInputSpecsBuilder_.remove(index);
}
return this;
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 *
 * Forces creation of the RepeatedFieldBuilder and returns a mutable
 * sub-builder for the element at {@code index}.
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder getGroupedInputSpecsBuilder(
int index) {
return getGroupedInputSpecsFieldBuilder().getBuilder(index);
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder getGroupedInputSpecsOrBuilder(
int index) {
if (groupedInputSpecsBuilder_ == null) {
return groupedInputSpecs_.get(index); } else {
return groupedInputSpecsBuilder_.getMessageOrBuilder(index);
}
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public java.util.List<? extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder>
getGroupedInputSpecsOrBuilderList() {
if (groupedInputSpecsBuilder_ != null) {
return groupedInputSpecsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(groupedInputSpecs_);
}
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 *
 * Appends a default-valued element and returns its builder.
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder addGroupedInputSpecsBuilder() {
return getGroupedInputSpecsFieldBuilder().addBuilder(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.getDefaultInstance());
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder addGroupedInputSpecsBuilder(
int index) {
return getGroupedInputSpecsFieldBuilder().addBuilder(
index, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.getDefaultInstance());
}
/**
 * <code>repeated .GroupInputSpecProto grouped_input_specs = 12;</code>
 */
public java.util.List<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder>
getGroupedInputSpecsBuilderList() {
return getGroupedInputSpecsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilder; the current list is handed to it and
// groupedInputSpecs_ is nulled so the builder becomes the single source of truth.
private com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder>
getGroupedInputSpecsFieldBuilder() {
if (groupedInputSpecsBuilder_ == null) {
groupedInputSpecsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GroupInputSpecProtoOrBuilder>(
groupedInputSpecs_,
((bitField0_ & 0x00000800) == 0x00000800),
getParentForChildren(),
isClean());
groupedInputSpecs_ = null;
}
return groupedInputSpecsBuilder_;
}
| |
// optional int32 vertex_parallelism = 13;
// Presence is tracked by bit 0x00001000 of bitField0_.
private int vertexParallelism_ ;
/**
 * <code>optional int32 vertex_parallelism = 13;</code>
 *
 * <pre>
 * An internal field required for Tez.
 * </pre>
 */
public boolean hasVertexParallelism() {
return ((bitField0_ & 0x00001000) == 0x00001000);
}
/**
 * <code>optional int32 vertex_parallelism = 13;</code>
 *
 * <pre>
 * An internal field required for Tez.
 * </pre>
 */
public int getVertexParallelism() {
return vertexParallelism_;
}
/**
 * <code>optional int32 vertex_parallelism = 13;</code>
 *
 * <pre>
 * An internal field required for Tez.
 * </pre>
 *
 * Sets the value and marks the field present.
 */
public Builder setVertexParallelism(int value) {
bitField0_ |= 0x00001000;
vertexParallelism_ = value;
onChanged();
return this;
}
/**
 * <code>optional int32 vertex_parallelism = 13;</code>
 *
 * <pre>
 * An internal field required for Tez.
 * </pre>
 *
 * Clears the presence bit and resets the value to 0 (the proto2 int32 default).
 */
public Builder clearVertexParallelism() {
bitField0_ = (bitField0_ & ~0x00001000);
vertexParallelism_ = 0;
onChanged();
return this;
}
| |
| // @@protoc_insertion_point(builder_scope:SignableVertexSpec) |
| } |
| |
| static { |
| defaultInstance = new SignableVertexSpec(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:SignableVertexSpec) |
| } |
| |
// Read-only accessor interface for the VertexOrBinary message, implemented by
// both the immutable message and its Builder. The message javadoc calls it a
// "Union": presumably at most one of vertex / vertexBinary is populated by
// writers — confirm with the producing code, as proto2 itself does not
// enforce this here.
public interface VertexOrBinaryOrBuilder
extends com.google.protobuf.MessageOrBuilder {

// optional .SignableVertexSpec vertex = 1;
/**
 * <code>optional .SignableVertexSpec vertex = 1;</code>
 */
boolean hasVertex();
/**
 * <code>optional .SignableVertexSpec vertex = 1;</code>
 */
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec getVertex();
/**
 * <code>optional .SignableVertexSpec vertex = 1;</code>
 */
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder getVertexOrBuilder();

// optional bytes vertexBinary = 2;
/**
 * <code>optional bytes vertexBinary = 2;</code>
 *
 * <pre>
 * SignableVertexSpec
 * </pre>
 */
boolean hasVertexBinary();
/**
 * <code>optional bytes vertexBinary = 2;</code>
 *
 * <pre>
 * SignableVertexSpec
 * </pre>
 */
com.google.protobuf.ByteString getVertexBinary();
}
| /** |
| * Protobuf type {@code VertexOrBinary} |
| * |
| * <pre> |
| * Union |
| * </pre> |
| */ |
| public static final class VertexOrBinary extends |
| com.google.protobuf.GeneratedMessage |
| implements VertexOrBinaryOrBuilder { |
// Use VertexOrBinary.newBuilder() to construct.
private VertexOrBinary(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// noInit constructor used only for the singleton default instance.
private VertexOrBinary(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final VertexOrBinary defaultInstance;
public static VertexOrBinary getDefaultInstance() {
return defaultInstance;
}

public VertexOrBinary getDefaultInstanceForType() {
return defaultInstance;
}

// Fields not recognized by this schema version are preserved here so they
// survive a parse/serialize round trip.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor, invoked via PARSER.
private VertexOrBinary(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
// Declared by the generator for repeated-field bookkeeping; unused here
// because this message has no repeated fields.
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: the generator emits the default clause before the field cases.
// Java selects switch arms by value, not lexical order, so this is
// behaviorally identical to having default last.
switch (tag) {
case 0:
// Tag 0 marks end of input (or end of a length-delimited scope).
done = true;
break;
default: {
// Unrecognized tag: stash it in unknownFields; stop if it cannot
// be parsed (e.g. an end-group tag).
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
// Field 1 (vertex), wire type 2 (length-delimited message).
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder subBuilder = null;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
// Field already seen on the wire: per proto2 semantics, merge the
// new occurrence into the existing value rather than replacing it.
subBuilder = vertex_.toBuilder();
}
vertex_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(vertex_);
vertex_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
// Field 2 (vertexBinary), wire type 2 (length-delimited bytes).
bitField0_ |= 0x00000002;
vertexBinary_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Attach the partially-parsed message so callers can inspect it.
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Runs on both success and failure: freeze whatever was collected.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexOrBinary_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexOrBinary_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder.class);
}

// Non-final by generated-code convention of this protobuf-java version;
// delegates to the wire-format parsing constructor above.
public static com.google.protobuf.Parser<VertexOrBinary> PARSER =
new com.google.protobuf.AbstractParser<VertexOrBinary>() {
public VertexOrBinary parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new VertexOrBinary(input, extensionRegistry);
}
};
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<VertexOrBinary> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // optional .SignableVertexSpec vertex = 1; |
| public static final int VERTEX_FIELD_NUMBER = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec vertex_; |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public boolean hasVertex() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec getVertex() { |
| return vertex_; |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder getVertexOrBuilder() { |
| return vertex_; |
| } |
| |
| // optional bytes vertexBinary = 2; |
| public static final int VERTEXBINARY_FIELD_NUMBER = 2; |
| private com.google.protobuf.ByteString vertexBinary_; |
| /** |
| * <code>optional bytes vertexBinary = 2;</code> |
| * |
| * <pre> |
| * SignableVertexSpec |
| * </pre> |
| */ |
| public boolean hasVertexBinary() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional bytes vertexBinary = 2;</code> |
| * |
| * <pre> |
| * SignableVertexSpec |
| * </pre> |
| */ |
| public com.google.protobuf.ByteString getVertexBinary() { |
| return vertexBinary_; |
| } |
| |
| private void initFields() { |
| vertex_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance(); |
| vertexBinary_ = com.google.protobuf.ByteString.EMPTY; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeMessage(1, vertex_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| output.writeBytes(2, vertexBinary_); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeMessageSize(1, vertex_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(2, vertexBinary_); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary) obj; |
| |
| boolean result = true; |
| result = result && (hasVertex() == other.hasVertex()); |
| if (hasVertex()) { |
| result = result && getVertex() |
| .equals(other.getVertex()); |
| } |
| result = result && (hasVertexBinary() == other.hasVertexBinary()); |
| if (hasVertexBinary()) { |
| result = result && getVertexBinary() |
| .equals(other.getVertexBinary()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasVertex()) { |
| hash = (37 * hash) + VERTEX_FIELD_NUMBER; |
| hash = (53 * hash) + getVertex().hashCode(); |
| } |
| if (hasVertexBinary()) { |
| hash = (37 * hash) + VERTEXBINARY_FIELD_NUMBER; |
| hash = (53 * hash) + getVertexBinary().hashCode(); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
| public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary prototype) { |
| return newBuilder().mergeFrom(prototype); |
| } |
| public Builder toBuilder() { return newBuilder(this); } |
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code VertexOrBinary} |
| * |
| * <pre> |
| * Union |
| * </pre> |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexOrBinary_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexOrBinary_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| getVertexFieldBuilder(); |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| if (vertexBuilder_ == null) { |
| vertex_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance(); |
| } else { |
| vertexBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000001); |
| vertexBinary_ = com.google.protobuf.ByteString.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_VertexOrBinary_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| if (vertexBuilder_ == null) { |
| result.vertex_ = vertex_; |
| } else { |
| result.vertex_ = vertexBuilder_.build(); |
| } |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.vertexBinary_ = vertexBinary_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance()) return this; |
| if (other.hasVertex()) { |
| mergeVertex(other.getVertex()); |
| } |
| if (other.hasVertexBinary()) { |
| setVertexBinary(other.getVertexBinary()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional .SignableVertexSpec vertex = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec vertex_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance(); |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder> vertexBuilder_; |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public boolean hasVertex() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec getVertex() { |
| if (vertexBuilder_ == null) { |
| return vertex_; |
| } else { |
| return vertexBuilder_.getMessage(); |
| } |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public Builder setVertex(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec value) { |
| if (vertexBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| vertex_ = value; |
| onChanged(); |
| } else { |
| vertexBuilder_.setMessage(value); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public Builder setVertex( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder builderForValue) { |
| if (vertexBuilder_ == null) { |
| vertex_ = builderForValue.build(); |
| onChanged(); |
| } else { |
| vertexBuilder_.setMessage(builderForValue.build()); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public Builder mergeVertex(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec value) { |
| if (vertexBuilder_ == null) { |
| if (((bitField0_ & 0x00000001) == 0x00000001) && |
| vertex_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance()) { |
| vertex_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.newBuilder(vertex_).mergeFrom(value).buildPartial(); |
| } else { |
| vertex_ = value; |
| } |
| onChanged(); |
| } else { |
| vertexBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public Builder clearVertex() { |
| if (vertexBuilder_ == null) { |
| vertex_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.getDefaultInstance(); |
| onChanged(); |
| } else { |
| vertexBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000001); |
| return this; |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder getVertexBuilder() { |
| bitField0_ |= 0x00000001; |
| onChanged(); |
| return getVertexFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder getVertexOrBuilder() { |
| if (vertexBuilder_ != null) { |
| return vertexBuilder_.getMessageOrBuilder(); |
| } else { |
| return vertex_; |
| } |
| } |
| /** |
| * <code>optional .SignableVertexSpec vertex = 1;</code> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder> |
| getVertexFieldBuilder() { |
| if (vertexBuilder_ == null) { |
| vertexBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpecOrBuilder>( |
| vertex_, |
| getParentForChildren(), |
| isClean()); |
| vertex_ = null; |
| } |
| return vertexBuilder_; |
| } |
| |
| // optional bytes vertexBinary = 2; |
| private com.google.protobuf.ByteString vertexBinary_ = com.google.protobuf.ByteString.EMPTY; |
| /** |
| * <code>optional bytes vertexBinary = 2;</code> |
| * |
| * <pre> |
| * SignableVertexSpec |
| * </pre> |
| */ |
| public boolean hasVertexBinary() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional bytes vertexBinary = 2;</code> |
| * |
| * <pre> |
| * SignableVertexSpec |
| * </pre> |
| */ |
| public com.google.protobuf.ByteString getVertexBinary() { |
| return vertexBinary_; |
| } |
| /** |
| * <code>optional bytes vertexBinary = 2;</code> |
| * |
| * <pre> |
| * SignableVertexSpec |
| * </pre> |
| */ |
| public Builder setVertexBinary(com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000002; |
| vertexBinary_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional bytes vertexBinary = 2;</code> |
| * |
| * <pre> |
| * SignableVertexSpec |
| * </pre> |
| */ |
| public Builder clearVertexBinary() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| vertexBinary_ = getDefaultInstance().getVertexBinary(); |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:VertexOrBinary) |
| } |
| |
| static { |
| defaultInstance = new VertexOrBinary(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:VertexOrBinary) |
| } |
| |
  /**
   * Accessor interface for {@code FragmentRuntimeInfo}, implemented by both
   * the immutable message and its Builder. Every field is optional, so each
   * value getter is paired with a has-bit check; getters return the protobuf
   * default (0 / 0L) when the field is unset.
   *
   * NOTE(review): the *_start_time fields look like epoch timestamps and the
   * int32 fields like task counts/priorities, but the units/semantics are not
   * visible here — confirm against the .proto comments or producers.
   */
  public interface FragmentRuntimeInfoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional int32 num_self_and_upstream_tasks = 1;
    /**
     * <code>optional int32 num_self_and_upstream_tasks = 1;</code>
     */
    boolean hasNumSelfAndUpstreamTasks();
    /**
     * <code>optional int32 num_self_and_upstream_tasks = 1;</code>
     */
    int getNumSelfAndUpstreamTasks();

    // optional int32 num_self_and_upstream_completed_tasks = 2;
    /**
     * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code>
     */
    boolean hasNumSelfAndUpstreamCompletedTasks();
    /**
     * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code>
     */
    int getNumSelfAndUpstreamCompletedTasks();

    // optional int32 within_dag_priority = 3;
    /**
     * <code>optional int32 within_dag_priority = 3;</code>
     */
    boolean hasWithinDagPriority();
    /**
     * <code>optional int32 within_dag_priority = 3;</code>
     */
    int getWithinDagPriority();

    // optional int64 dag_start_time = 4;
    /**
     * <code>optional int64 dag_start_time = 4;</code>
     */
    boolean hasDagStartTime();
    /**
     * <code>optional int64 dag_start_time = 4;</code>
     */
    long getDagStartTime();

    // optional int64 first_attempt_start_time = 5;
    /**
     * <code>optional int64 first_attempt_start_time = 5;</code>
     */
    boolean hasFirstAttemptStartTime();
    /**
     * <code>optional int64 first_attempt_start_time = 5;</code>
     */
    long getFirstAttemptStartTime();

    // optional int64 current_attempt_start_time = 6;
    /**
     * <code>optional int64 current_attempt_start_time = 6;</code>
     */
    boolean hasCurrentAttemptStartTime();
    /**
     * <code>optional int64 current_attempt_start_time = 6;</code>
     */
    long getCurrentAttemptStartTime();
  }
| /** |
| * Protobuf type {@code FragmentRuntimeInfo} |
| */ |
| public static final class FragmentRuntimeInfo extends |
| com.google.protobuf.GeneratedMessage |
| implements FragmentRuntimeInfoOrBuilder { |
| // Use FragmentRuntimeInfo.newBuilder() to construct. |
| private FragmentRuntimeInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private FragmentRuntimeInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final FragmentRuntimeInfo defaultInstance; |
| public static FragmentRuntimeInfo getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public FragmentRuntimeInfo getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private FragmentRuntimeInfo( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 8: { |
| bitField0_ |= 0x00000001; |
| numSelfAndUpstreamTasks_ = input.readInt32(); |
| break; |
| } |
| case 16: { |
| bitField0_ |= 0x00000002; |
| numSelfAndUpstreamCompletedTasks_ = input.readInt32(); |
| break; |
| } |
| case 24: { |
| bitField0_ |= 0x00000004; |
| withinDagPriority_ = input.readInt32(); |
| break; |
| } |
| case 32: { |
| bitField0_ |= 0x00000008; |
| dagStartTime_ = input.readInt64(); |
| break; |
| } |
| case 40: { |
| bitField0_ |= 0x00000010; |
| firstAttemptStartTime_ = input.readInt64(); |
| break; |
| } |
| case 48: { |
| bitField0_ |= 0x00000020; |
| currentAttemptStartTime_ = input.readInt64(); |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_FragmentRuntimeInfo_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_FragmentRuntimeInfo_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<FragmentRuntimeInfo> PARSER = |
| new com.google.protobuf.AbstractParser<FragmentRuntimeInfo>() { |
| public FragmentRuntimeInfo parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new FragmentRuntimeInfo(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<FragmentRuntimeInfo> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // optional int32 num_self_and_upstream_tasks = 1; |
| public static final int NUM_SELF_AND_UPSTREAM_TASKS_FIELD_NUMBER = 1; |
| private int numSelfAndUpstreamTasks_; |
| /** |
| * <code>optional int32 num_self_and_upstream_tasks = 1;</code> |
| */ |
| public boolean hasNumSelfAndUpstreamTasks() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_tasks = 1;</code> |
| */ |
| public int getNumSelfAndUpstreamTasks() { |
| return numSelfAndUpstreamTasks_; |
| } |
| |
| // optional int32 num_self_and_upstream_completed_tasks = 2; |
| public static final int NUM_SELF_AND_UPSTREAM_COMPLETED_TASKS_FIELD_NUMBER = 2; |
| private int numSelfAndUpstreamCompletedTasks_; |
| /** |
| * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code> |
| */ |
| public boolean hasNumSelfAndUpstreamCompletedTasks() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code> |
| */ |
| public int getNumSelfAndUpstreamCompletedTasks() { |
| return numSelfAndUpstreamCompletedTasks_; |
| } |
| |
| // optional int32 within_dag_priority = 3; |
| public static final int WITHIN_DAG_PRIORITY_FIELD_NUMBER = 3; |
| private int withinDagPriority_; |
| /** |
| * <code>optional int32 within_dag_priority = 3;</code> |
| */ |
| public boolean hasWithinDagPriority() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional int32 within_dag_priority = 3;</code> |
| */ |
| public int getWithinDagPriority() { |
| return withinDagPriority_; |
| } |
| |
| // optional int64 dag_start_time = 4; |
| public static final int DAG_START_TIME_FIELD_NUMBER = 4; |
| private long dagStartTime_; |
| /** |
| * <code>optional int64 dag_start_time = 4;</code> |
| */ |
| public boolean hasDagStartTime() { |
| return ((bitField0_ & 0x00000008) == 0x00000008); |
| } |
| /** |
| * <code>optional int64 dag_start_time = 4;</code> |
| */ |
| public long getDagStartTime() { |
| return dagStartTime_; |
| } |
| |
| // optional int64 first_attempt_start_time = 5; |
| public static final int FIRST_ATTEMPT_START_TIME_FIELD_NUMBER = 5; |
| private long firstAttemptStartTime_; |
| /** |
| * <code>optional int64 first_attempt_start_time = 5;</code> |
| */ |
| public boolean hasFirstAttemptStartTime() { |
| return ((bitField0_ & 0x00000010) == 0x00000010); |
| } |
| /** |
| * <code>optional int64 first_attempt_start_time = 5;</code> |
| */ |
| public long getFirstAttemptStartTime() { |
| return firstAttemptStartTime_; |
| } |
| |
| // optional int64 current_attempt_start_time = 6; |
| public static final int CURRENT_ATTEMPT_START_TIME_FIELD_NUMBER = 6; |
| private long currentAttemptStartTime_; |
| /** |
| * <code>optional int64 current_attempt_start_time = 6;</code> |
| */ |
| public boolean hasCurrentAttemptStartTime() { |
| return ((bitField0_ & 0x00000020) == 0x00000020); |
| } |
| /** |
| * <code>optional int64 current_attempt_start_time = 6;</code> |
| */ |
| public long getCurrentAttemptStartTime() { |
| return currentAttemptStartTime_; |
| } |
| |
| private void initFields() { |
| numSelfAndUpstreamTasks_ = 0; |
| numSelfAndUpstreamCompletedTasks_ = 0; |
| withinDagPriority_ = 0; |
| dagStartTime_ = 0L; |
| firstAttemptStartTime_ = 0L; |
| currentAttemptStartTime_ = 0L; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeInt32(1, numSelfAndUpstreamTasks_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| output.writeInt32(2, numSelfAndUpstreamCompletedTasks_); |
| } |
| if (((bitField0_ & 0x00000004) == 0x00000004)) { |
| output.writeInt32(3, withinDagPriority_); |
| } |
| if (((bitField0_ & 0x00000008) == 0x00000008)) { |
| output.writeInt64(4, dagStartTime_); |
| } |
| if (((bitField0_ & 0x00000010) == 0x00000010)) { |
| output.writeInt64(5, firstAttemptStartTime_); |
| } |
| if (((bitField0_ & 0x00000020) == 0x00000020)) { |
| output.writeInt64(6, currentAttemptStartTime_); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
    // Memoized wire size; -1 means not yet computed. Safe to cache because
    // the message is immutable once built/parsed.
    private int memoizedSerializedSize = -1;
    // Computes the exact number of bytes writeTo() will emit: the sum of
    // each set field's tag+varint size plus the unknown-field set.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, numSelfAndUpstreamTasks_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, numSelfAndUpstreamCompletedTasks_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, withinDagPriority_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(4, dagStartTime_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(5, firstAttemptStartTime_);
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(6, currentAttemptStartTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the superclass, which
    // (in the protobuf runtime) substitutes a serialization proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Field-wise equality: two messages are equal iff each field has the same
    // presence state, equal values where present, and equal unknown-field sets.
    // The `result = result && ...` chaining is the generator's uniform pattern.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo) obj;

      boolean result = true;
      result = result && (hasNumSelfAndUpstreamTasks() == other.hasNumSelfAndUpstreamTasks());
      if (hasNumSelfAndUpstreamTasks()) {
        result = result && (getNumSelfAndUpstreamTasks()
            == other.getNumSelfAndUpstreamTasks());
      }
      result = result && (hasNumSelfAndUpstreamCompletedTasks() == other.hasNumSelfAndUpstreamCompletedTasks());
      if (hasNumSelfAndUpstreamCompletedTasks()) {
        result = result && (getNumSelfAndUpstreamCompletedTasks()
            == other.getNumSelfAndUpstreamCompletedTasks());
      }
      result = result && (hasWithinDagPriority() == other.hasWithinDagPriority());
      if (hasWithinDagPriority()) {
        result = result && (getWithinDagPriority()
            == other.getWithinDagPriority());
      }
      result = result && (hasDagStartTime() == other.hasDagStartTime());
      if (hasDagStartTime()) {
        result = result && (getDagStartTime()
            == other.getDagStartTime());
      }
      result = result && (hasFirstAttemptStartTime() == other.hasFirstAttemptStartTime());
      if (hasFirstAttemptStartTime()) {
        result = result && (getFirstAttemptStartTime()
            == other.getFirstAttemptStartTime());
      }
      result = result && (hasCurrentAttemptStartTime() == other.hasCurrentAttemptStartTime());
      if (hasCurrentAttemptStartTime()) {
        result = result && (getCurrentAttemptStartTime()
            == other.getCurrentAttemptStartTime());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
| |
    // Memoized hash; 0 means "not yet computed" (a computed hash of exactly 0
    // would be recomputed each call — harmless, just not cached).
    private int memoizedHashCode = 0;
    // Hash mixes only fields that are present, keyed by field number, so it is
    // consistent with equals() above. hashLong is a protobuf runtime helper
    // (presumably folds the 64-bit value to 32 bits — defined outside this view).
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasNumSelfAndUpstreamTasks()) {
        hash = (37 * hash) + NUM_SELF_AND_UPSTREAM_TASKS_FIELD_NUMBER;
        hash = (53 * hash) + getNumSelfAndUpstreamTasks();
      }
      if (hasNumSelfAndUpstreamCompletedTasks()) {
        hash = (37 * hash) + NUM_SELF_AND_UPSTREAM_COMPLETED_TASKS_FIELD_NUMBER;
        hash = (53 * hash) + getNumSelfAndUpstreamCompletedTasks();
      }
      if (hasWithinDagPriority()) {
        hash = (37 * hash) + WITHIN_DAG_PRIORITY_FIELD_NUMBER;
        hash = (53 * hash) + getWithinDagPriority();
      }
      if (hasDagStartTime()) {
        hash = (37 * hash) + DAG_START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getDagStartTime());
      }
      if (hasFirstAttemptStartTime()) {
        hash = (37 * hash) + FIRST_ATTEMPT_START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getFirstAttemptStartTime());
      }
      if (hasCurrentAttemptStartTime()) {
        hash = (37 * hash) + CURRENT_ATTEMPT_START_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCurrentAttemptStartTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // Standard generated parse entry points: every overload delegates to the
    // static PARSER. The *Delimited* variants first read a varint length
    // prefix, allowing multiple messages on one stream.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Builder factory methods: newBuilder() for an empty builder,
    // newBuilder(prototype)/toBuilder() for a builder pre-populated from an
    // existing message via mergeFrom.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Parent-aware builder creation used internally by the protobuf runtime
    // for nested-builder change propagation.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code FragmentRuntimeInfo} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_FragmentRuntimeInfo_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_FragmentRuntimeInfo_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| numSelfAndUpstreamTasks_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| numSelfAndUpstreamCompletedTasks_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| withinDagPriority_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000004); |
| dagStartTime_ = 0L; |
| bitField0_ = (bitField0_ & ~0x00000008); |
| firstAttemptStartTime_ = 0L; |
| bitField0_ = (bitField0_ & ~0x00000010); |
| currentAttemptStartTime_ = 0L; |
| bitField0_ = (bitField0_ & ~0x00000020); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_FragmentRuntimeInfo_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.numSelfAndUpstreamTasks_ = numSelfAndUpstreamTasks_; |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.numSelfAndUpstreamCompletedTasks_ = numSelfAndUpstreamCompletedTasks_; |
| if (((from_bitField0_ & 0x00000004) == 0x00000004)) { |
| to_bitField0_ |= 0x00000004; |
| } |
| result.withinDagPriority_ = withinDagPriority_; |
| if (((from_bitField0_ & 0x00000008) == 0x00000008)) { |
| to_bitField0_ |= 0x00000008; |
| } |
| result.dagStartTime_ = dagStartTime_; |
| if (((from_bitField0_ & 0x00000010) == 0x00000010)) { |
| to_bitField0_ |= 0x00000010; |
| } |
| result.firstAttemptStartTime_ = firstAttemptStartTime_; |
| if (((from_bitField0_ & 0x00000020) == 0x00000020)) { |
| to_bitField0_ |= 0x00000020; |
| } |
| result.currentAttemptStartTime_ = currentAttemptStartTime_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance()) return this; |
| if (other.hasNumSelfAndUpstreamTasks()) { |
| setNumSelfAndUpstreamTasks(other.getNumSelfAndUpstreamTasks()); |
| } |
| if (other.hasNumSelfAndUpstreamCompletedTasks()) { |
| setNumSelfAndUpstreamCompletedTasks(other.getNumSelfAndUpstreamCompletedTasks()); |
| } |
| if (other.hasWithinDagPriority()) { |
| setWithinDagPriority(other.getWithinDagPriority()); |
| } |
| if (other.hasDagStartTime()) { |
| setDagStartTime(other.getDagStartTime()); |
| } |
| if (other.hasFirstAttemptStartTime()) { |
| setFirstAttemptStartTime(other.getFirstAttemptStartTime()); |
| } |
| if (other.hasCurrentAttemptStartTime()) { |
| setCurrentAttemptStartTime(other.getCurrentAttemptStartTime()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional int32 num_self_and_upstream_tasks = 1; |
| private int numSelfAndUpstreamTasks_ ; |
| /** |
| * <code>optional int32 num_self_and_upstream_tasks = 1;</code> |
| */ |
| public boolean hasNumSelfAndUpstreamTasks() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_tasks = 1;</code> |
| */ |
| public int getNumSelfAndUpstreamTasks() { |
| return numSelfAndUpstreamTasks_; |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_tasks = 1;</code> |
| */ |
| public Builder setNumSelfAndUpstreamTasks(int value) { |
| bitField0_ |= 0x00000001; |
| numSelfAndUpstreamTasks_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_tasks = 1;</code> |
| */ |
| public Builder clearNumSelfAndUpstreamTasks() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| numSelfAndUpstreamTasks_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int32 num_self_and_upstream_completed_tasks = 2; |
| private int numSelfAndUpstreamCompletedTasks_ ; |
| /** |
| * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code> |
| */ |
| public boolean hasNumSelfAndUpstreamCompletedTasks() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code> |
| */ |
| public int getNumSelfAndUpstreamCompletedTasks() { |
| return numSelfAndUpstreamCompletedTasks_; |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code> |
| */ |
| public Builder setNumSelfAndUpstreamCompletedTasks(int value) { |
| bitField0_ |= 0x00000002; |
| numSelfAndUpstreamCompletedTasks_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 num_self_and_upstream_completed_tasks = 2;</code> |
| */ |
| public Builder clearNumSelfAndUpstreamCompletedTasks() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| numSelfAndUpstreamCompletedTasks_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int32 within_dag_priority = 3; |
| private int withinDagPriority_ ; |
| /** |
| * <code>optional int32 within_dag_priority = 3;</code> |
| */ |
| public boolean hasWithinDagPriority() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional int32 within_dag_priority = 3;</code> |
| */ |
| public int getWithinDagPriority() { |
| return withinDagPriority_; |
| } |
| /** |
| * <code>optional int32 within_dag_priority = 3;</code> |
| */ |
| public Builder setWithinDagPriority(int value) { |
| bitField0_ |= 0x00000004; |
| withinDagPriority_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 within_dag_priority = 3;</code> |
| */ |
| public Builder clearWithinDagPriority() { |
| bitField0_ = (bitField0_ & ~0x00000004); |
| withinDagPriority_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int64 dag_start_time = 4; |
| private long dagStartTime_ ; |
| /** |
| * <code>optional int64 dag_start_time = 4;</code> |
| */ |
| public boolean hasDagStartTime() { |
| return ((bitField0_ & 0x00000008) == 0x00000008); |
| } |
| /** |
| * <code>optional int64 dag_start_time = 4;</code> |
| */ |
| public long getDagStartTime() { |
| return dagStartTime_; |
| } |
| /** |
| * <code>optional int64 dag_start_time = 4;</code> |
| */ |
| public Builder setDagStartTime(long value) { |
| bitField0_ |= 0x00000008; |
| dagStartTime_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int64 dag_start_time = 4;</code> |
| */ |
| public Builder clearDagStartTime() { |
| bitField0_ = (bitField0_ & ~0x00000008); |
| dagStartTime_ = 0L; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int64 first_attempt_start_time = 5; |
| private long firstAttemptStartTime_ ; |
| /** |
| * <code>optional int64 first_attempt_start_time = 5;</code> |
| */ |
| public boolean hasFirstAttemptStartTime() { |
| return ((bitField0_ & 0x00000010) == 0x00000010); |
| } |
| /** |
| * <code>optional int64 first_attempt_start_time = 5;</code> |
| */ |
| public long getFirstAttemptStartTime() { |
| return firstAttemptStartTime_; |
| } |
| /** |
| * <code>optional int64 first_attempt_start_time = 5;</code> |
| */ |
| public Builder setFirstAttemptStartTime(long value) { |
| bitField0_ |= 0x00000010; |
| firstAttemptStartTime_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int64 first_attempt_start_time = 5;</code> |
| */ |
| public Builder clearFirstAttemptStartTime() { |
| bitField0_ = (bitField0_ & ~0x00000010); |
| firstAttemptStartTime_ = 0L; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int64 current_attempt_start_time = 6; |
| private long currentAttemptStartTime_ ; |
| /** |
| * <code>optional int64 current_attempt_start_time = 6;</code> |
| */ |
| public boolean hasCurrentAttemptStartTime() { |
| return ((bitField0_ & 0x00000020) == 0x00000020); |
| } |
| /** |
| * <code>optional int64 current_attempt_start_time = 6;</code> |
| */ |
| public long getCurrentAttemptStartTime() { |
| return currentAttemptStartTime_; |
| } |
| /** |
| * <code>optional int64 current_attempt_start_time = 6;</code> |
| */ |
| public Builder setCurrentAttemptStartTime(long value) { |
| bitField0_ |= 0x00000020; |
| currentAttemptStartTime_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int64 current_attempt_start_time = 6;</code> |
| */ |
| public Builder clearCurrentAttemptStartTime() { |
| bitField0_ = (bitField0_ & ~0x00000020); |
| currentAttemptStartTime_ = 0L; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:FragmentRuntimeInfo) |
| } |
| |
    // Eagerly creates the shared singleton returned by getDefaultInstance();
    // the (true) constructor skips the usual builder path.
    static {
      defaultInstance = new FragmentRuntimeInfo(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:FragmentRuntimeInfo) |
| } |
| |
  /**
   * Read-only view of a {@code QueryIdentifierProto}, implemented by both the
   * immutable message and its Builder. Each optional field exposes a
   * presence check (hasX) and a getter (getX).
   */
  public interface QueryIdentifierProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string application_id_string = 1;
    /**
     * <code>optional string application_id_string = 1;</code>
     */
    boolean hasApplicationIdString();
    /**
     * <code>optional string application_id_string = 1;</code>
     */
    java.lang.String getApplicationIdString();
    /**
     * Raw UTF-8 bytes variant of the string field above.
     *
     * <code>optional string application_id_string = 1;</code>
     */
    com.google.protobuf.ByteString
        getApplicationIdStringBytes();

    // optional int32 dag_index = 2;
    /**
     * <code>optional int32 dag_index = 2;</code>
     */
    boolean hasDagIndex();
    /**
     * <code>optional int32 dag_index = 2;</code>
     */
    int getDagIndex();

    // optional int32 app_attempt_number = 3;
    /**
     * <code>optional int32 app_attempt_number = 3;</code>
     */
    boolean hasAppAttemptNumber();
    /**
     * <code>optional int32 app_attempt_number = 3;</code>
     */
    int getAppAttemptNumber();
  }
| /** |
| * Protobuf type {@code QueryIdentifierProto} |
| */ |
| public static final class QueryIdentifierProto extends |
| com.google.protobuf.GeneratedMessage |
| implements QueryIdentifierProtoOrBuilder { |
    // Use QueryIdentifierProto.newBuilder() to construct.
    private QueryIdentifierProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only for the default (empty) singleton; no builder involved,
    // so the unknown-field set is the shared empty instance.
    private QueryIdentifierProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
| |
    // Shared immutable empty instance, created in this class's static
    // initializer (outside this view); all fields at proto defaults.
    private static final QueryIdentifierProto defaultInstance;
    public static QueryIdentifierProto getDefaultInstance() {
      return defaultInstance;
    }

    public QueryIdentifierProto getDefaultInstanceForType() {
      return defaultInstance;
    }
| |
    // Fields seen on the wire that this schema version does not know;
    // retained so reserialization round-trips them losslessly.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads tag/value pairs until EOF (tag 0) or an
    // unparseable field. Tags: 10 = field 1 length-delimited (string bytes),
    // 16 = field 2 varint, 24 = field 3 varint. The `default` arm placement
    // before the `case` labels is generator style — switch matching is
    // unaffected by label order. Unrecognized tags go to the unknown-field set.
    private QueryIdentifierProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              // Kept as ByteString; lazily decoded to String on first access.
              applicationIdString_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              dagIndex_ = input.readInt32();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              appAttemptNumber_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on error so the partially-parsed message is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor plumbing linking this class to the reflection metadata
    // defined at the bottom of the generated outer class.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryIdentifierProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryIdentifierProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder.class);
    }
| |
    // Shared parser used by every static parseFrom overload; delegates to the
    // parsing constructor above.
    public static com.google.protobuf.Parser<QueryIdentifierProto> PARSER =
        new com.google.protobuf.AbstractParser<QueryIdentifierProto>() {
      public QueryIdentifierProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new QueryIdentifierProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<QueryIdentifierProto> getParserForType() {
      return PARSER;
    }
| |
    // One presence bit per optional field: 0x01, 0x02, 0x04 in field order.
    private int bitField0_;
    // optional string application_id_string = 1;
    public static final int APPLICATION_ID_STRING_FIELD_NUMBER = 1;
    // Holds either a String or a ByteString: parsed messages start with the
    // raw ByteString and cache the decoded String on first getter call.
    private java.lang.Object applicationIdString_;
    /**
     * <code>optional string application_id_string = 1;</code>
     */
    public boolean hasApplicationIdString() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * Lazily decodes the UTF-8 bytes; the decoded String is cached back only
     * if the bytes were valid UTF-8, so invalid input is re-decoded each call.
     *
     * <code>optional string application_id_string = 1;</code>
     */
    public java.lang.String getApplicationIdString() {
      java.lang.Object ref = applicationIdString_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          applicationIdString_ = s;
        }
        return s;
      }
    }
    /**
     * Returns the field as UTF-8 bytes, encoding (and caching) from the
     * String form when necessary.
     *
     * <code>optional string application_id_string = 1;</code>
     */
    public com.google.protobuf.ByteString
        getApplicationIdStringBytes() {
      java.lang.Object ref = applicationIdString_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        applicationIdString_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional int32 dag_index = 2;
    public static final int DAG_INDEX_FIELD_NUMBER = 2;
    // Presence tracked by bit 0x02 of bitField0_.
    private int dagIndex_;
    /**
     * <code>optional int32 dag_index = 2;</code>
     */
    public boolean hasDagIndex() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * Returns the field value; 0 (the default) when unset.
     *
     * <code>optional int32 dag_index = 2;</code>
     */
    public int getDagIndex() {
      return dagIndex_;
    }
| |
    // optional int32 app_attempt_number = 3;
    public static final int APP_ATTEMPT_NUMBER_FIELD_NUMBER = 3;
    // Presence tracked by bit 0x04 of bitField0_.
    private int appAttemptNumber_;
    /**
     * <code>optional int32 app_attempt_number = 3;</code>
     */
    public boolean hasAppAttemptNumber() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * Returns the field value; 0 (the default) when unset.
     *
     * <code>optional int32 app_attempt_number = 3;</code>
     */
    public int getAppAttemptNumber() {
      return appAttemptNumber_;
    }
| |
    // Resets every field of QueryIdentifierProto to its proto default
    // (empty string / 0); called from the parsing constructor.
    private void initFields() {
      applicationIdString_ = "";
      dagIndex_ = 0;
      appAttemptNumber_ = 0;
    }
    // Memoized result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // Always true: this message has no required fields.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes set fields in field-number order: 1 (length-delimited
    // string bytes), 2 and 3 (varints); unset fields are omitted.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getApplicationIdStringBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, dagIndex_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, appAttemptNumber_);
      }
      getUnknownFields().writeTo(output);
    }
| |
    // Memoized wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Mirrors writeTo(): sums the encoded size of each set field plus the
    // unknown-field set.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getApplicationIdStringBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, dagIndex_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, appAttemptNumber_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to the protobuf runtime superclass.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Field-wise equality consistent with hashCode(): same presence state,
    // equal values where present, and equal unknown-field sets.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto) obj;

      boolean result = true;
      result = result && (hasApplicationIdString() == other.hasApplicationIdString());
      if (hasApplicationIdString()) {
        result = result && getApplicationIdString()
            .equals(other.getApplicationIdString());
      }
      result = result && (hasDagIndex() == other.hasDagIndex());
      if (hasDagIndex()) {
        result = result && (getDagIndex()
            == other.getDagIndex());
      }
      result = result && (hasAppAttemptNumber() == other.hasAppAttemptNumber());
      if (hasAppAttemptNumber()) {
        result = result && (getAppAttemptNumber()
            == other.getAppAttemptNumber());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
| |
    // Memoized hash; 0 means "not yet computed" (a genuine hash of 0 is
    // simply recomputed each call — correct, just uncached).
    private int memoizedHashCode = 0;
    // Mixes only present fields, keyed by field number; consistent with equals().
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasApplicationIdString()) {
        hash = (37 * hash) + APPLICATION_ID_STRING_FIELD_NUMBER;
        hash = (53 * hash) + getApplicationIdString().hashCode();
      }
      if (hasDagIndex()) {
        hash = (37 * hash) + DAG_INDEX_FIELD_NUMBER;
        hash = (53 * hash) + getDagIndex();
      }
      if (hasAppAttemptNumber()) {
        hash = (37 * hash) + APP_ATTEMPT_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + getAppAttemptNumber();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // Standard generated parse entry points; all delegate to PARSER. The
    // *Delimited* variants read a varint length prefix first.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Builder factories: newBuilder(prototype) seeds a builder with a copy of
    // an existing message; toBuilder() is the instance-method equivalent.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Internal hook used by parent builders for nested-builder support.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code QueryIdentifierProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryIdentifierProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryIdentifierProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| applicationIdString_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| dagIndex_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| appAttemptNumber_ = 0; |
| bitField0_ = (bitField0_ & ~0x00000004); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryIdentifierProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.applicationIdString_ = applicationIdString_; |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.dagIndex_ = dagIndex_; |
| if (((from_bitField0_ & 0x00000004) == 0x00000004)) { |
| to_bitField0_ |= 0x00000004; |
| } |
| result.appAttemptNumber_ = appAttemptNumber_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) return this; |
| if (other.hasApplicationIdString()) { |
| bitField0_ |= 0x00000001; |
| applicationIdString_ = other.applicationIdString_; |
| onChanged(); |
| } |
| if (other.hasDagIndex()) { |
| setDagIndex(other.getDagIndex()); |
| } |
| if (other.hasAppAttemptNumber()) { |
| setAppAttemptNumber(other.getAppAttemptNumber()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional string application_id_string = 1; |
| private java.lang.Object applicationIdString_ = ""; |
| /** |
| * <code>optional string application_id_string = 1;</code> |
| */ |
| public boolean hasApplicationIdString() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional string application_id_string = 1;</code> |
| */ |
| public java.lang.String getApplicationIdString() { |
| java.lang.Object ref = applicationIdString_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| applicationIdString_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>optional string application_id_string = 1;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getApplicationIdStringBytes() { |
| java.lang.Object ref = applicationIdString_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| applicationIdString_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>optional string application_id_string = 1;</code> |
| */ |
| public Builder setApplicationIdString( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| applicationIdString_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string application_id_string = 1;</code> |
| */ |
| public Builder clearApplicationIdString() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| applicationIdString_ = getDefaultInstance().getApplicationIdString(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string application_id_string = 1;</code> |
| */ |
| public Builder setApplicationIdStringBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| applicationIdString_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int32 dag_index = 2; |
| private int dagIndex_ ; |
| /** |
| * <code>optional int32 dag_index = 2;</code> |
| */ |
| public boolean hasDagIndex() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int32 dag_index = 2;</code> |
| */ |
| public int getDagIndex() { |
| return dagIndex_; |
| } |
| /** |
| * <code>optional int32 dag_index = 2;</code> |
| */ |
| public Builder setDagIndex(int value) { |
| bitField0_ |= 0x00000002; |
| dagIndex_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 dag_index = 2;</code> |
| */ |
| public Builder clearDagIndex() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| dagIndex_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // optional int32 app_attempt_number = 3; |
| private int appAttemptNumber_ ; |
| /** |
| * <code>optional int32 app_attempt_number = 3;</code> |
| */ |
| public boolean hasAppAttemptNumber() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional int32 app_attempt_number = 3;</code> |
| */ |
| public int getAppAttemptNumber() { |
| return appAttemptNumber_; |
| } |
| /** |
| * <code>optional int32 app_attempt_number = 3;</code> |
| */ |
| public Builder setAppAttemptNumber(int value) { |
| bitField0_ |= 0x00000004; |
| appAttemptNumber_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int32 app_attempt_number = 3;</code> |
| */ |
| public Builder clearAppAttemptNumber() { |
| bitField0_ = (bitField0_ & ~0x00000004); |
| appAttemptNumber_ = 0; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:QueryIdentifierProto) |
| } |
| |
    // Eagerly create the shared default (empty) instance; the noInit=true
    // constructor skips descriptor-dependent work, then initFields() sets
    // field defaults.
    static {
      defaultInstance = new QueryIdentifierProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:QueryIdentifierProto) |
| } |
| |
  // Read-only view shared by NotTezEvent and NotTezEvent.Builder.
  public interface NotTezEventOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes input_event_proto_bytes = 1;
    /**
     * <code>required bytes input_event_proto_bytes = 1;</code>
     */
    boolean hasInputEventProtoBytes();
    /**
     * <code>required bytes input_event_proto_bytes = 1;</code>
     */
    com.google.protobuf.ByteString getInputEventProtoBytes();

    // required string vertex_name = 2;
    /**
     * <code>required string vertex_name = 2;</code>
     */
    boolean hasVertexName();
    /**
     * <code>required string vertex_name = 2;</code>
     */
    java.lang.String getVertexName();
    /**
     * <code>required string vertex_name = 2;</code>
     */
    com.google.protobuf.ByteString
        getVertexNameBytes();

    // required string dest_input_name = 3;
    /**
     * <code>required string dest_input_name = 3;</code>
     */
    boolean hasDestInputName();
    /**
     * <code>required string dest_input_name = 3;</code>
     */
    java.lang.String getDestInputName();
    /**
     * <code>required string dest_input_name = 3;</code>
     */
    com.google.protobuf.ByteString
        getDestInputNameBytes();

    // optional int32 key_id = 4;
    /**
     * <code>optional int32 key_id = 4;</code>
     */
    boolean hasKeyId();
    /**
     * <code>optional int32 key_id = 4;</code>
     */
    int getKeyId();
  }
| /** |
| * Protobuf type {@code NotTezEvent} |
| * |
| * <pre> |
| ** |
| * Tez API implementation derives an enum value from instanceof on the event, then uses that enum |
| * in a giant switch statement to re-implement virtual functions. In addition the event classes |
| * are final and serialization is a mix of writables and protobuf. As a result, it is impossible |
| * three times over to add anything there. So, we'd do our own "inspired" serialization. |
| * Eventually we'll move away from events for API. |
| * </pre> |
| */ |
| public static final class NotTezEvent extends |
| com.google.protobuf.GeneratedMessage |
| implements NotTezEventOrBuilder { |
| // Use NotTezEvent.newBuilder() to construct. |
    // Use NotTezEvent.newBuilder() to construct.
    private NotTezEvent(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the shared default instance.
    private NotTezEvent(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Shared immutable empty instance, assigned in the class static block.
    private static final NotTezEvent defaultInstance;
    public static NotTezEvent getDefaultInstance() {
      return defaultInstance;
    }

    public NotTezEvent getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that this (possibly older) schema doesn't know.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF
    // (tag 0) or an end-group tag. NOTE: the `default:` arm appearing before
    // the field cases is legal Java and is the standard protoc output —
    // switch arms are matched by value, not position.
    private NotTezEvent(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              inputEventProtoBytes_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              vertexName_ = input.readBytes();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              destInputName_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              keyId_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever unknown fields were collected, even on error.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_NotTezEvent_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_NotTezEvent_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.Builder.class);
    }

    // Stateless parser singleton backing all the static parseFrom overloads.
    public static com.google.protobuf.Parser<NotTezEvent> PARSER =
        new com.google.protobuf.AbstractParser<NotTezEvent>() {
      public NotTezEvent parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new NotTezEvent(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<NotTezEvent> getParserForType() {
      return PARSER;
    }
| |
    // Presence bits: bit 0 = input_event_proto_bytes, bit 1 = vertex_name,
    // bit 2 = dest_input_name, bit 3 = key_id.
    private int bitField0_;
    // required bytes input_event_proto_bytes = 1;
    public static final int INPUT_EVENT_PROTO_BYTES_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString inputEventProtoBytes_;
    /**
     * <code>required bytes input_event_proto_bytes = 1;</code>
     */
    public boolean hasInputEventProtoBytes() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes input_event_proto_bytes = 1;</code>
     */
    public com.google.protobuf.ByteString getInputEventProtoBytes() {
      return inputEventProtoBytes_;
    }

    // required string vertex_name = 2;
    public static final int VERTEX_NAME_FIELD_NUMBER = 2;
    // Holds either a String or a ByteString; decoded lazily and cached as a
    // String (only when valid UTF-8) on first getVertexName() call.
    private java.lang.Object vertexName_;
    /**
     * <code>required string vertex_name = 2;</code>
     */
    public boolean hasVertexName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required string vertex_name = 2;</code>
     */
    public java.lang.String getVertexName() {
      java.lang.Object ref = vertexName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          vertexName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string vertex_name = 2;</code>
     */
    public com.google.protobuf.ByteString
        getVertexNameBytes() {
      java.lang.Object ref = vertexName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        vertexName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // required string dest_input_name = 3;
    public static final int DEST_INPUT_NAME_FIELD_NUMBER = 3;
    // Same lazy String/ByteString caching scheme as vertexName_.
    private java.lang.Object destInputName_;
    /**
     * <code>required string dest_input_name = 3;</code>
     */
    public boolean hasDestInputName() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>required string dest_input_name = 3;</code>
     */
    public java.lang.String getDestInputName() {
      java.lang.Object ref = destInputName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          destInputName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string dest_input_name = 3;</code>
     */
    public com.google.protobuf.ByteString
        getDestInputNameBytes() {
      java.lang.Object ref = destInputName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        destInputName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional int32 key_id = 4;
    public static final int KEY_ID_FIELD_NUMBER = 4;
    private int keyId_;
    /**
     * <code>optional int32 key_id = 4;</code>
     */
    public boolean hasKeyId() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional int32 key_id = 4;</code>
     */
    public int getKeyId() {
      return keyId_;
    }
| |
    // Sets every field to its proto default (called before parsing).
    private void initFields() {
      inputEventProtoBytes_ = com.google.protobuf.ByteString.EMPTY;
      vertexName_ = "";
      destInputName_ = "";
      keyId_ = 0;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The three required fields (1-3) must all be present; key_id is
      // optional and not checked.
      if (!hasInputEventProtoBytes()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasVertexName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasDestInputName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes set fields (in field-number order) followed by any unknown
    // fields. getSerializedSize() is invoked first to populate memoized sizes.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, inputEventProtoBytes_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getVertexNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, getDestInputNameBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt32(4, keyId_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, inputEventProtoBytes_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getVertexNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, getDestInputNameBytes());
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, keyId_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java serialization hook; delegates to GeneratedMessage's proxy form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Two messages are equal when each field has matching presence and
    // (when present) matching value, and unknown fields are equal.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent) obj;

      boolean result = true;
      result = result && (hasInputEventProtoBytes() == other.hasInputEventProtoBytes());
      if (hasInputEventProtoBytes()) {
        result = result && getInputEventProtoBytes()
            .equals(other.getInputEventProtoBytes());
      }
      result = result && (hasVertexName() == other.hasVertexName());
      if (hasVertexName()) {
        result = result && getVertexName()
            .equals(other.getVertexName());
      }
      result = result && (hasDestInputName() == other.hasDestInputName());
      if (hasDestInputName()) {
        result = result && getDestInputName()
            .equals(other.getDestInputName());
      }
      result = result && (hasKeyId() == other.hasKeyId());
      if (hasKeyId()) {
        result = result && (getKeyId()
            == other.getKeyId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
| |
    // Cached hashCode(); 0 means "not yet computed" (a real hash of 0 is
    // recomputed each call, which is harmless).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      // Mirrors equals(): only present fields contribute, field number mixed
      // in before the value.
      if (hasInputEventProtoBytes()) {
        hash = (37 * hash) + INPUT_EVENT_PROTO_BYTES_FIELD_NUMBER;
        hash = (53 * hash) + getInputEventProtoBytes().hashCode();
      }
      if (hasVertexName()) {
        hash = (37 * hash) + VERTEX_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getVertexName().hashCode();
      }
      if (hasDestInputName()) {
        hash = (37 * hash) + DEST_INPUT_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getDestInputName().hashCode();
      }
      if (hasKeyId()) {
        hash = (37 * hash) + KEY_ID_FIELD_NUMBER;
        hash = (53 * hash) + getKeyId();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // Static parse entry points. All variants delegate to PARSER; the
    // *Delimited* forms read a varint length prefix before the message.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Builder factories: newBuilder(prototype) seeds a builder with a copy of
    // an existing message; toBuilder() is the instance-method equivalent.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Internal hook used by parent builders for nested-builder support.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code NotTezEvent} |
| * |
| * <pre> |
| ** |
| * Tez API implementation derives an enum value from instanceof on the event, then uses that enum |
| * in a giant switch statement to re-implement virtual functions. In addition the event classes |
| * are final and serialization is a mix of writables and protobuf. As a result, it is impossible |
| * three times over to add anything there. So, we'd do our own "inspired" serialization. |
| * Eventually we'll move away from events for API. |
| * </pre> |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEventOrBuilder { |
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_NotTezEvent_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_NotTezEvent_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.Builder.class);
      }

      // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op here: this message has no sub-message fields whose nested
      // builders would need eager initialization.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }
| |
      // Resets every field to its default and clears all presence bits
      // (bit 0 = input_event_proto_bytes, 1 = vertex_name,
      //  2 = dest_input_name, 3 = key_id).
      public Builder clear() {
        super.clear();
        inputEventProtoBytes_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        vertexName_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        destInputName_ = "";
        bitField0_ = (bitField0_ & ~0x00000004);
        keyId_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_NotTezEvent_descriptor;
      }

      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent getDefaultInstanceForType() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.getDefaultInstance();
      }

      // Like buildPartial(), but throws if a required field (fields 1-3)
      // is unset.
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent build() {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
| |
      // Copies the builder's current state into a freshly constructed message
      // WITHOUT checking required fields (unlike build()). Each presence bit
      // set in the builder's bitField0_ is transferred to the message's.
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent buildPartial() {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.inputEventProtoBytes_ = inputEventProtoBytes_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.vertexName_ = vertexName_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.destInputName_ = destInputName_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.keyId_ = keyId_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
      // Field-by-field merge: fields present in 'other' overwrite this
      // builder's values. String fields copy the raw Object reference
      // (String or ByteString) to avoid forcing a UTF-8 conversion here.
      public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent other) {
        // Merging the default instance is a no-op.
        if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent.getDefaultInstance()) return this;
        if (other.hasInputEventProtoBytes()) {
          setInputEventProtoBytes(other.getInputEventProtoBytes());
        }
        if (other.hasVertexName()) {
          bitField0_ |= 0x00000002;
          vertexName_ = other.vertexName_;
          onChanged();
        }
        if (other.hasDestInputName()) {
          bitField0_ |= 0x00000004;
          destInputName_ = other.destInputName_;
          onChanged();
        }
        if (other.hasKeyId()) {
          setKeyId(other.getKeyId());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
| |
| public final boolean isInitialized() { |
| if (!hasInputEventProtoBytes()) { |
| |
| return false; |
| } |
| if (!hasVertexName()) { |
| |
| return false; |
| } |
| if (!hasDestInputName()) { |
| |
| return false; |
| } |
| return true; |
| } |
| |
      // Parses a NotTezEvent from the wire and merges it into this builder.
      // On parse failure the partially-read message (attached to the
      // exception) is still merged in the finally block before rethrowing,
      // matching protobuf's merge-what-was-read contract.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.NotTezEvent) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for this builder: bit 0 = input_event_proto_bytes,
      // bit 1 = vertex_name, bit 2 = dest_input_name, bit 3 = key_id.
      private int bitField0_;

      // required bytes input_event_proto_bytes = 1;
      private com.google.protobuf.ByteString inputEventProtoBytes_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes input_event_proto_bytes = 1;</code>
       */
      public boolean hasInputEventProtoBytes() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes input_event_proto_bytes = 1;</code>
       */
      public com.google.protobuf.ByteString getInputEventProtoBytes() {
        return inputEventProtoBytes_;
      }
      /**
       * <code>required bytes input_event_proto_bytes = 1;</code>
       */
      public Builder setInputEventProtoBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        inputEventProtoBytes_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes input_event_proto_bytes = 1;</code>
       */
      public Builder clearInputEventProtoBytes() {
        bitField0_ = (bitField0_ & ~0x00000001);
        // Reset to the default instance's value (ByteString.EMPTY).
        inputEventProtoBytes_ = getDefaultInstance().getInputEventProtoBytes();
        onChanged();
        return this;
      }

      // required string vertex_name = 2;
      // Stored as Object so it can hold either a String or a ByteString;
      // conversion between the two is done lazily and cached.
      private java.lang.Object vertexName_ = "";
      /**
       * <code>required string vertex_name = 2;</code>
       */
      public boolean hasVertexName() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>required string vertex_name = 2;</code>
       */
      public java.lang.String getVertexName() {
        java.lang.Object ref = vertexName_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString and cache the decoded String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          vertexName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string vertex_name = 2;</code>
       */
      public com.google.protobuf.ByteString
          getVertexNameBytes() {
        java.lang.Object ref = vertexName_;
        if (ref instanceof String) {
          // Encode the cached String and cache the UTF-8 bytes.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          vertexName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string vertex_name = 2;</code>
       */
      public Builder setVertexName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        vertexName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string vertex_name = 2;</code>
       */
      public Builder clearVertexName() {
        bitField0_ = (bitField0_ & ~0x00000002);
        vertexName_ = getDefaultInstance().getVertexName();
        onChanged();
        return this;
      }
      /**
       * <code>required string vertex_name = 2;</code>
       */
      public Builder setVertexNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        vertexName_ = value;
        onChanged();
        return this;
      }
| |
      // required string dest_input_name = 3;
      // Stored as Object (String or ByteString) with lazy, cached conversion.
      private java.lang.Object destInputName_ = "";
      /**
       * <code>required string dest_input_name = 3;</code>
       */
      public boolean hasDestInputName() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>required string dest_input_name = 3;</code>
       */
      public java.lang.String getDestInputName() {
        java.lang.Object ref = destInputName_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          destInputName_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string dest_input_name = 3;</code>
       */
      public com.google.protobuf.ByteString
          getDestInputNameBytes() {
        java.lang.Object ref = destInputName_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          destInputName_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string dest_input_name = 3;</code>
       */
      public Builder setDestInputName(
          java.lang.String value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        destInputName_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string dest_input_name = 3;</code>
       */
      public Builder clearDestInputName() {
        bitField0_ = (bitField0_ & ~0x00000004);
        destInputName_ = getDefaultInstance().getDestInputName();
        onChanged();
        return this;
      }
      /**
       * <code>required string dest_input_name = 3;</code>
       */
      public Builder setDestInputNameBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        destInputName_ = value;
        onChanged();
        return this;
      }

      // optional int32 key_id = 4;
      private int keyId_ ;
      /**
       * <code>optional int32 key_id = 4;</code>
       */
      public boolean hasKeyId() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional int32 key_id = 4;</code>
       */
      public int getKeyId() {
        return keyId_;
      }
      /**
       * <code>optional int32 key_id = 4;</code>
       */
      public Builder setKeyId(int value) {
        bitField0_ |= 0x00000008;
        keyId_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 key_id = 4;</code>
       */
      public Builder clearKeyId() {
        bitField0_ = (bitField0_ & ~0x00000008);
        keyId_ = 0;
        onChanged();
        return this;
      }
| |
| // @@protoc_insertion_point(builder_scope:NotTezEvent) |
| } |
| |
    static {
      // Eagerly create and initialize the singleton default instance;
      // getDefaultInstance() relies on this running at class-load time.
      defaultInstance = new NotTezEvent(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:NotTezEvent) |
| } |
| |
  /**
   * Accessor contract shared by {@code SubmitWorkRequestProto} and its
   * Builder: a presence check plus getters for every declared field
   * (all fields of this message are optional).
   */
  public interface SubmitWorkRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .VertexOrBinary work_spec = 1;
    /**
     * <code>optional .VertexOrBinary work_spec = 1;</code>
     */
    boolean hasWorkSpec();
    /**
     * <code>optional .VertexOrBinary work_spec = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary getWorkSpec();
    /**
     * <code>optional .VertexOrBinary work_spec = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder getWorkSpecOrBuilder();

    // optional bytes work_spec_signature = 2;
    /**
     * <code>optional bytes work_spec_signature = 2;</code>
     */
    boolean hasWorkSpecSignature();
    /**
     * <code>optional bytes work_spec_signature = 2;</code>
     */
    com.google.protobuf.ByteString getWorkSpecSignature();

    // optional int32 fragment_number = 3;
    /**
     * <code>optional int32 fragment_number = 3;</code>
     */
    boolean hasFragmentNumber();
    /**
     * <code>optional int32 fragment_number = 3;</code>
     */
    int getFragmentNumber();

    // optional int32 attempt_number = 4;
    /**
     * <code>optional int32 attempt_number = 4;</code>
     */
    boolean hasAttemptNumber();
    /**
     * <code>optional int32 attempt_number = 4;</code>
     */
    int getAttemptNumber();

    // optional string container_id_string = 5;
    /**
     * <code>optional string container_id_string = 5;</code>
     */
    boolean hasContainerIdString();
    /**
     * <code>optional string container_id_string = 5;</code>
     */
    java.lang.String getContainerIdString();
    /**
     * <code>optional string container_id_string = 5;</code>
     */
    com.google.protobuf.ByteString
        getContainerIdStringBytes();

    // optional string am_host = 6;
    /**
     * <code>optional string am_host = 6;</code>
     */
    boolean hasAmHost();
    /**
     * <code>optional string am_host = 6;</code>
     */
    java.lang.String getAmHost();
    /**
     * <code>optional string am_host = 6;</code>
     */
    com.google.protobuf.ByteString
        getAmHostBytes();

    // optional int32 am_port = 7;
    /**
     * <code>optional int32 am_port = 7;</code>
     */
    boolean hasAmPort();
    /**
     * <code>optional int32 am_port = 7;</code>
     */
    int getAmPort();

    // optional bytes credentials_binary = 8;
    /**
     * <code>optional bytes credentials_binary = 8;</code>
     *
     * <pre>
     * Credentials are not signed - the client can add e.g. his own HDFS tokens.
     * </pre>
     */
    boolean hasCredentialsBinary();
    /**
     * <code>optional bytes credentials_binary = 8;</code>
     *
     * <pre>
     * Credentials are not signed - the client can add e.g. his own HDFS tokens.
     * </pre>
     */
    com.google.protobuf.ByteString getCredentialsBinary();

    // optional .FragmentRuntimeInfo fragment_runtime_info = 9;
    /**
     * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
     *
     * <pre>
     * Not supported/honored for external clients right now.
     * </pre>
     */
    boolean hasFragmentRuntimeInfo();
    /**
     * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
     *
     * <pre>
     * Not supported/honored for external clients right now.
     * </pre>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo getFragmentRuntimeInfo();
    /**
     * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
     *
     * <pre>
     * Not supported/honored for external clients right now.
     * </pre>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder getFragmentRuntimeInfoOrBuilder();

    // optional bytes initial_event_bytes = 10;
    /**
     * <code>optional bytes initial_event_bytes = 10;</code>
     *
     * <pre>
     * Serialized (and signed) NotTezEvent; used only for external clients for now.
     * </pre>
     */
    boolean hasInitialEventBytes();
    /**
     * <code>optional bytes initial_event_bytes = 10;</code>
     *
     * <pre>
     * Serialized (and signed) NotTezEvent; used only for external clients for now.
     * </pre>
     */
    com.google.protobuf.ByteString getInitialEventBytes();

    // optional bytes initial_event_signature = 11;
    /**
     * <code>optional bytes initial_event_signature = 11;</code>
     */
    boolean hasInitialEventSignature();
    /**
     * <code>optional bytes initial_event_signature = 11;</code>
     */
    com.google.protobuf.ByteString getInitialEventSignature();
  }
| /** |
| * Protobuf type {@code SubmitWorkRequestProto} |
| */ |
| public static final class SubmitWorkRequestProto extends |
| com.google.protobuf.GeneratedMessage |
| implements SubmitWorkRequestProtoOrBuilder { |
    // Use SubmitWorkRequestProto.newBuilder() to construct.
    private SubmitWorkRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructs the bare singleton default instance; 'noInit' only
    // distinguishes this overload and is otherwise unused.
    private SubmitWorkRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
| |
    // Singleton default instance, assigned in this class's static initializer.
    private static final SubmitWorkRequestProto defaultInstance;
    public static SubmitWorkRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    // Instance-level alias for the singleton, required by the Message API.
    public SubmitWorkRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }
| |
    // Fields not recognized during parsing are preserved here so they
    // survive a parse/reserialize round trip.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor (invoked via PARSER): reads tag/value
    // pairs until end of stream (tag == 0) or an unskippable tag; fields with
    // unrecognized tags are preserved in unknownFields.
    private SubmitWorkRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // Note: the 'default' label precedes the field cases; case order in
          // a Java switch has no effect on dispatch, so this is equivalent to
          // placing 'default' last.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1: work_spec (message). A repeated occurrence on the
              // wire is merged into the previously read value.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = workSpec_.toBuilder();
              }
              workSpec_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(workSpec_);
                workSpec_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              workSpecSignature_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              fragmentNumber_ = input.readInt32();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              attemptNumber_ = input.readInt32();
              break;
            }
            case 42: {
              // String fields are stored as raw ByteString; UTF-8 decoding is
              // deferred to the getter.
              bitField0_ |= 0x00000010;
              containerIdString_ = input.readBytes();
              break;
            }
            case 50: {
              bitField0_ |= 0x00000020;
              amHost_ = input.readBytes();
              break;
            }
            case 56: {
              bitField0_ |= 0x00000040;
              amPort_ = input.readInt32();
              break;
            }
            case 66: {
              bitField0_ |= 0x00000080;
              credentialsBinary_ = input.readBytes();
              break;
            }
            case 74: {
              // Field 9: fragment_runtime_info (message); merged like work_spec.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder subBuilder = null;
              if (((bitField0_ & 0x00000100) == 0x00000100)) {
                subBuilder = fragmentRuntimeInfo_.toBuilder();
              }
              fragmentRuntimeInfo_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(fragmentRuntimeInfo_);
                fragmentRuntimeInfo_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000100;
              break;
            }
            case 82: {
              bitField0_ |= 0x00000200;
              initialEventBytes_ = input.readBytes();
              break;
            }
            case 90: {
              bitField0_ |= 0x00000400;
              initialEventSignature_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze unknown fields, even when parsing failed partway.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the reflection descriptor for the SubmitWorkRequestProto type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
    }
| |
    // Wires this class and its Builder to the descriptor-based reflection
    // machinery used by GeneratedMessage.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.Builder.class);
    }
| |
| public static com.google.protobuf.Parser<SubmitWorkRequestProto> PARSER = |
| new com.google.protobuf.AbstractParser<SubmitWorkRequestProto>() { |
| public SubmitWorkRequestProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new SubmitWorkRequestProto(input, extensionRegistry); |
| } |
| }; |
| |
    @java.lang.Override
    public com.google.protobuf.Parser<SubmitWorkRequestProto> getParserForType() {
      // Exposes the shared PARSER through the Message API.
      return PARSER;
    }
| |
    // Presence bits: bit i set means field number i+1 was present on the wire.
    private int bitField0_;
    // optional .VertexOrBinary work_spec = 1;
    public static final int WORK_SPEC_FIELD_NUMBER = 1;
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary workSpec_;
    /**
     * <code>optional .VertexOrBinary work_spec = 1;</code>
     */
    public boolean hasWorkSpec() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .VertexOrBinary work_spec = 1;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary getWorkSpec() {
      return workSpec_;
    }
    /**
     * <code>optional .VertexOrBinary work_spec = 1;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder getWorkSpecOrBuilder() {
      return workSpec_;
    }

    // optional bytes work_spec_signature = 2;
    public static final int WORK_SPEC_SIGNATURE_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString workSpecSignature_;
    /**
     * <code>optional bytes work_spec_signature = 2;</code>
     */
    public boolean hasWorkSpecSignature() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes work_spec_signature = 2;</code>
     */
    public com.google.protobuf.ByteString getWorkSpecSignature() {
      return workSpecSignature_;
    }

    // optional int32 fragment_number = 3;
    public static final int FRAGMENT_NUMBER_FIELD_NUMBER = 3;
    private int fragmentNumber_;
    /**
     * <code>optional int32 fragment_number = 3;</code>
     */
    public boolean hasFragmentNumber() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional int32 fragment_number = 3;</code>
     */
    public int getFragmentNumber() {
      return fragmentNumber_;
    }

    // optional int32 attempt_number = 4;
    public static final int ATTEMPT_NUMBER_FIELD_NUMBER = 4;
    private int attemptNumber_;
    /**
     * <code>optional int32 attempt_number = 4;</code>
     */
    public boolean hasAttemptNumber() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional int32 attempt_number = 4;</code>
     */
    public int getAttemptNumber() {
      return attemptNumber_;
    }
| |
    // optional string container_id_string = 5;
    public static final int CONTAINER_ID_STRING_FIELD_NUMBER = 5;
    // Holds either a String or the raw ByteString read off the wire;
    // UTF-8 decoding is performed (and cached) lazily in the getter.
    private java.lang.Object containerIdString_;
    /**
     * <code>optional string container_id_string = 5;</code>
     */
    public boolean hasContainerIdString() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string container_id_string = 5;</code>
     */
    public java.lang.String getContainerIdString() {
      java.lang.Object ref = containerIdString_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes are valid UTF-8, so
        // malformed payloads keep their original bytes for reserialization.
        if (bs.isValidUtf8()) {
          containerIdString_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string container_id_string = 5;</code>
     */
    public com.google.protobuf.ByteString
        getContainerIdStringBytes() {
      java.lang.Object ref = containerIdString_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        containerIdString_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional string am_host = 6;
    public static final int AM_HOST_FIELD_NUMBER = 6;
    // String-or-ByteString holder with the same lazy caching as above.
    private java.lang.Object amHost_;
    /**
     * <code>optional string am_host = 6;</code>
     */
    public boolean hasAmHost() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional string am_host = 6;</code>
     */
    public java.lang.String getAmHost() {
      java.lang.Object ref = amHost_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          amHost_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string am_host = 6;</code>
     */
    public com.google.protobuf.ByteString
        getAmHostBytes() {
      java.lang.Object ref = amHost_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        amHost_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional int32 am_port = 7;
    public static final int AM_PORT_FIELD_NUMBER = 7;
    private int amPort_;
    /**
     * <code>optional int32 am_port = 7;</code>
     */
    public boolean hasAmPort() {
      return ((bitField0_ & 0x00000040) == 0x00000040);
    }
    /**
     * <code>optional int32 am_port = 7;</code>
     */
    public int getAmPort() {
      return amPort_;
    }
| |
    // optional bytes credentials_binary = 8;
    public static final int CREDENTIALS_BINARY_FIELD_NUMBER = 8;
    private com.google.protobuf.ByteString credentialsBinary_;
    /**
     * <code>optional bytes credentials_binary = 8;</code>
     *
     * <pre>
     * Credentials are not signed - the client can add e.g. his own HDFS tokens.
     * </pre>
     */
    public boolean hasCredentialsBinary() {
      return ((bitField0_ & 0x00000080) == 0x00000080);
    }
    /**
     * <code>optional bytes credentials_binary = 8;</code>
     *
     * <pre>
     * Credentials are not signed - the client can add e.g. his own HDFS tokens.
     * </pre>
     */
    public com.google.protobuf.ByteString getCredentialsBinary() {
      return credentialsBinary_;
    }

    // optional .FragmentRuntimeInfo fragment_runtime_info = 9;
    public static final int FRAGMENT_RUNTIME_INFO_FIELD_NUMBER = 9;
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo fragmentRuntimeInfo_;
    /**
     * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
     *
     * <pre>
     * Not supported/honored for external clients right now.
     * </pre>
     */
    public boolean hasFragmentRuntimeInfo() {
      return ((bitField0_ & 0x00000100) == 0x00000100);
    }
    /**
     * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
     *
     * <pre>
     * Not supported/honored for external clients right now.
     * </pre>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo getFragmentRuntimeInfo() {
      return fragmentRuntimeInfo_;
    }
    /**
     * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
     *
     * <pre>
     * Not supported/honored for external clients right now.
     * </pre>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder getFragmentRuntimeInfoOrBuilder() {
      return fragmentRuntimeInfo_;
    }

    // optional bytes initial_event_bytes = 10;
    public static final int INITIAL_EVENT_BYTES_FIELD_NUMBER = 10;
    private com.google.protobuf.ByteString initialEventBytes_;
    /**
     * <code>optional bytes initial_event_bytes = 10;</code>
     *
     * <pre>
     * Serialized (and signed) NotTezEvent; used only for external clients for now.
     * </pre>
     */
    public boolean hasInitialEventBytes() {
      return ((bitField0_ & 0x00000200) == 0x00000200);
    }
    /**
     * <code>optional bytes initial_event_bytes = 10;</code>
     *
     * <pre>
     * Serialized (and signed) NotTezEvent; used only for external clients for now.
     * </pre>
     */
    public com.google.protobuf.ByteString getInitialEventBytes() {
      return initialEventBytes_;
    }

    // optional bytes initial_event_signature = 11;
    public static final int INITIAL_EVENT_SIGNATURE_FIELD_NUMBER = 11;
    private com.google.protobuf.ByteString initialEventSignature_;
    /**
     * <code>optional bytes initial_event_signature = 11;</code>
     */
    public boolean hasInitialEventSignature() {
      return ((bitField0_ & 0x00000400) == 0x00000400);
    }
    /**
     * <code>optional bytes initial_event_signature = 11;</code>
     */
    public com.google.protobuf.ByteString getInitialEventSignature() {
      return initialEventSignature_;
    }
| |
    // Assigns every field its proto default value; called by the parsing
    // constructor before any wire data is read.
    private void initFields() {
      workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
      workSpecSignature_ = com.google.protobuf.ByteString.EMPTY;
      fragmentNumber_ = 0;
      attemptNumber_ = 0;
      containerIdString_ = "";
      amHost_ = "";
      amPort_ = 0;
      credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
      fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
      initialEventBytes_ = com.google.protobuf.ByteString.EMPTY;
      initialEventSignature_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Tri-state cache: -1 = not yet computed, 1 = initialized, 0 = not.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // Every field of SubmitWorkRequestProto is optional, so the message is
      // always considered initialized.
      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes all set fields to 'output' in ascending field-number order.
    // getSerializedSize() is called first so nested-message sizes are
    // memoized before writeMessage() needs them.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, workSpec_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, workSpecSignature_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt32(3, fragmentNumber_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeInt32(4, attemptNumber_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getContainerIdStringBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeBytes(6, getAmHostBytes());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        output.writeInt32(7, amPort_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        output.writeBytes(8, credentialsBinary_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        output.writeMessage(9, fragmentRuntimeInfo_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        output.writeBytes(10, initialEventBytes_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        output.writeBytes(11, initialEventSignature_);
      }
      // Unknown fields are reserialized last to round-trip unrecognized data.
      getUnknownFields().writeTo(output);
    }
| |
    // Cached wire size; -1 means not yet computed. Safe to memoize because
    // the message is immutable after construction.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, workSpec_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, workSpecSignature_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(3, fragmentNumber_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(4, attemptNumber_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getContainerIdStringBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(6, getAmHostBytes());
      }
      if (((bitField0_ & 0x00000040) == 0x00000040)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(7, amPort_);
      }
      if (((bitField0_ & 0x00000080) == 0x00000080)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(8, credentialsBinary_);
      }
      if (((bitField0_ & 0x00000100) == 0x00000100)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(9, fragmentRuntimeInfo_);
      }
      if (((bitField0_ & 0x00000200) == 0x00000200)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(10, initialEventBytes_);
      }
      if (((bitField0_ & 0x00000400) == 0x00000400)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(11, initialEventSignature_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java serialization is routed through the superclass's writeReplace,
    // which substitutes a proxy backed by the protobuf wire encoding.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) obj; |
| |
| boolean result = true; |
| result = result && (hasWorkSpec() == other.hasWorkSpec()); |
| if (hasWorkSpec()) { |
| result = result && getWorkSpec() |
| .equals(other.getWorkSpec()); |
| } |
| result = result && (hasWorkSpecSignature() == other.hasWorkSpecSignature()); |
| if (hasWorkSpecSignature()) { |
| result = result && getWorkSpecSignature() |
| .equals(other.getWorkSpecSignature()); |
| } |
| result = result && (hasFragmentNumber() == other.hasFragmentNumber()); |
| if (hasFragmentNumber()) { |
| result = result && (getFragmentNumber() |
| == other.getFragmentNumber()); |
| } |
| result = result && (hasAttemptNumber() == other.hasAttemptNumber()); |
| if (hasAttemptNumber()) { |
| result = result && (getAttemptNumber() |
| == other.getAttemptNumber()); |
| } |
| result = result && (hasContainerIdString() == other.hasContainerIdString()); |
| if (hasContainerIdString()) { |
| result = result && getContainerIdString() |
| .equals(other.getContainerIdString()); |
| } |
| result = result && (hasAmHost() == other.hasAmHost()); |
| if (hasAmHost()) { |
| result = result && getAmHost() |
| .equals(other.getAmHost()); |
| } |
| result = result && (hasAmPort() == other.hasAmPort()); |
| if (hasAmPort()) { |
| result = result && (getAmPort() |
| == other.getAmPort()); |
| } |
| result = result && (hasCredentialsBinary() == other.hasCredentialsBinary()); |
| if (hasCredentialsBinary()) { |
| result = result && getCredentialsBinary() |
| .equals(other.getCredentialsBinary()); |
| } |
| result = result && (hasFragmentRuntimeInfo() == other.hasFragmentRuntimeInfo()); |
| if (hasFragmentRuntimeInfo()) { |
| result = result && getFragmentRuntimeInfo() |
| .equals(other.getFragmentRuntimeInfo()); |
| } |
| result = result && (hasInitialEventBytes() == other.hasInitialEventBytes()); |
| if (hasInitialEventBytes()) { |
| result = result && getInitialEventBytes() |
| .equals(other.getInitialEventBytes()); |
| } |
| result = result && (hasInitialEventSignature() == other.hasInitialEventSignature()); |
| if (hasInitialEventSignature()) { |
| result = result && getInitialEventSignature() |
| .equals(other.getInitialEventSignature()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
    // Cached hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      // Each present field mixes in its field number followed by its value,
      // mirroring the presence checks performed by equals().
      if (hasWorkSpec()) {
        hash = (37 * hash) + WORK_SPEC_FIELD_NUMBER;
        hash = (53 * hash) + getWorkSpec().hashCode();
      }
      if (hasWorkSpecSignature()) {
        hash = (37 * hash) + WORK_SPEC_SIGNATURE_FIELD_NUMBER;
        hash = (53 * hash) + getWorkSpecSignature().hashCode();
      }
      if (hasFragmentNumber()) {
        hash = (37 * hash) + FRAGMENT_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + getFragmentNumber();
      }
      if (hasAttemptNumber()) {
        hash = (37 * hash) + ATTEMPT_NUMBER_FIELD_NUMBER;
        hash = (53 * hash) + getAttemptNumber();
      }
      if (hasContainerIdString()) {
        hash = (37 * hash) + CONTAINER_ID_STRING_FIELD_NUMBER;
        hash = (53 * hash) + getContainerIdString().hashCode();
      }
      if (hasAmHost()) {
        hash = (37 * hash) + AM_HOST_FIELD_NUMBER;
        hash = (53 * hash) + getAmHost().hashCode();
      }
      if (hasAmPort()) {
        hash = (37 * hash) + AM_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getAmPort();
      }
      if (hasCredentialsBinary()) {
        hash = (37 * hash) + CREDENTIALS_BINARY_FIELD_NUMBER;
        hash = (53 * hash) + getCredentialsBinary().hashCode();
      }
      if (hasFragmentRuntimeInfo()) {
        hash = (37 * hash) + FRAGMENT_RUNTIME_INFO_FIELD_NUMBER;
        hash = (53 * hash) + getFragmentRuntimeInfo().hashCode();
      }
      if (hasInitialEventBytes()) {
        hash = (37 * hash) + INITIAL_EVENT_BYTES_FIELD_NUMBER;
        hash = (53 * hash) + getInitialEventBytes().hashCode();
      }
      if (hasInitialEventSignature()) {
        hash = (37 * hash) + INITIAL_EVENT_SIGNATURE_FIELD_NUMBER;
        hash = (53 * hash) + getInitialEventSignature().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // Static parse entry points. Every overload delegates to the shared
    // PARSER instance; the *Delimited* variants read a length-prefixed
    // message so multiple messages can share one stream.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Builder factories: a fresh builder, or one pre-populated by merging
    // an existing message (used by toBuilder()).
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code SubmitWorkRequestProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProtoOrBuilder { |
      // Descriptor for the SubmitWorkRequestProto message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
      }
| |
      // Accessor table used by the protobuf runtime to reach the generated
      // getters/setters reflectively.
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.Builder.class);
      }
| |
      // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Eagerly instantiates the nested-message field builders when the
        // runtime flag requests it (needed when this builder has a parent).
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getWorkSpecFieldBuilder();
          getFragmentRuntimeInfoFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
| |
      // Resets every field to its default value and clears all presence
      // bits in bitField0_ (one bit per optional field, 0x1 .. 0x400).
      public Builder clear() {
        super.clear();
        if (workSpecBuilder_ == null) {
          workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
        } else {
          workSpecBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        workSpecSignature_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        fragmentNumber_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        attemptNumber_ = 0;
        bitField0_ = (bitField0_ & ~0x00000008);
        containerIdString_ = "";
        bitField0_ = (bitField0_ & ~0x00000010);
        amHost_ = "";
        bitField0_ = (bitField0_ & ~0x00000020);
        amPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000040);
        credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000080);
        if (fragmentRuntimeInfoBuilder_ == null) {
          fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
        } else {
          fragmentRuntimeInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000100);
        initialEventBytes_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000200);
        initialEventSignature_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000400);
        return this;
      }
| |
      // Copies this builder by merging a partially built snapshot of its
      // current state into a fresh builder.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
| |
      // Descriptor and default instance for the message type this builder
      // produces.
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkRequestProto_descriptor;
      }

      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto getDefaultInstanceForType() {
        return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance();
      }
| |
      // Builds the message, throwing UninitializedMessageException if it is
      // not fully initialized (all fields here are optional, so in practice
      // isInitialized() always returns true).
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto build() {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
| |
      // Copies the builder's field values into a new message instance and
      // translates the builder's presence bits into the message's bitField0_.
      // Message fields come from their sub-builder when one was created.
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto buildPartial() {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (workSpecBuilder_ == null) {
          result.workSpec_ = workSpec_;
        } else {
          result.workSpec_ = workSpecBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.workSpecSignature_ = workSpecSignature_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.fragmentNumber_ = fragmentNumber_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.attemptNumber_ = attemptNumber_;
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000010;
        }
        result.containerIdString_ = containerIdString_;
        if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
          to_bitField0_ |= 0x00000020;
        }
        result.amHost_ = amHost_;
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000040;
        }
        result.amPort_ = amPort_;
        if (((from_bitField0_ & 0x00000080) == 0x00000080)) {
          to_bitField0_ |= 0x00000080;
        }
        result.credentialsBinary_ = credentialsBinary_;
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000100;
        }
        if (fragmentRuntimeInfoBuilder_ == null) {
          result.fragmentRuntimeInfo_ = fragmentRuntimeInfo_;
        } else {
          result.fragmentRuntimeInfo_ = fragmentRuntimeInfoBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000200) == 0x00000200)) {
          to_bitField0_ |= 0x00000200;
        }
        result.initialEventBytes_ = initialEventBytes_;
        if (((from_bitField0_ & 0x00000400) == 0x00000400)) {
          to_bitField0_ |= 0x00000400;
        }
        result.initialEventSignature_ = initialEventSignature_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
      // Field-by-field merge: only fields set in `other` overwrite this
      // builder's state. String fields copy the raw Object reference (which
      // may be a String or a lazily decoded ByteString) and set the presence
      // bit directly, avoiding an eager UTF-8 conversion.
      public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto other) {
        if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance()) return this;
        if (other.hasWorkSpec()) {
          mergeWorkSpec(other.getWorkSpec());
        }
        if (other.hasWorkSpecSignature()) {
          setWorkSpecSignature(other.getWorkSpecSignature());
        }
        if (other.hasFragmentNumber()) {
          setFragmentNumber(other.getFragmentNumber());
        }
        if (other.hasAttemptNumber()) {
          setAttemptNumber(other.getAttemptNumber());
        }
        if (other.hasContainerIdString()) {
          bitField0_ |= 0x00000010;
          containerIdString_ = other.containerIdString_;
          onChanged();
        }
        if (other.hasAmHost()) {
          bitField0_ |= 0x00000020;
          amHost_ = other.amHost_;
          onChanged();
        }
        if (other.hasAmPort()) {
          setAmPort(other.getAmPort());
        }
        if (other.hasCredentialsBinary()) {
          setCredentialsBinary(other.getCredentialsBinary());
        }
        if (other.hasFragmentRuntimeInfo()) {
          mergeFragmentRuntimeInfo(other.getFragmentRuntimeInfo());
        }
        if (other.hasInitialEventBytes()) {
          setInitialEventBytes(other.getInitialEventBytes());
        }
        if (other.hasInitialEventSignature()) {
          setInitialEventSignature(other.getInitialEventSignature());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
| |
      // Every field of SubmitWorkRequestProto is optional, so the message is
      // always considered initialized.
      public final boolean isInitialized() {
        return true;
      }
| |
      // Parses a message from the stream and merges it into this builder.
      // On InvalidProtocolBufferException, whatever was successfully parsed
      // (the exception's unfinished message) is still merged in the finally
      // block before the exception is rethrown.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bits for the optional fields; one bit per field (0x1..0x400).
      private int bitField0_;

      // optional .VertexOrBinary work_spec = 1;
      // Field state lives in workSpec_ until getWorkSpecFieldBuilder() is
      // first called; from then on workSpecBuilder_ owns the value.
      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder> workSpecBuilder_;
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public boolean hasWorkSpec() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary getWorkSpec() {
        if (workSpecBuilder_ == null) {
          return workSpec_;
        } else {
          return workSpecBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public Builder setWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary value) {
        if (workSpecBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          workSpec_ = value;
          onChanged();
        } else {
          workSpecBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public Builder setWorkSpec(
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder builderForValue) {
        if (workSpecBuilder_ == null) {
          workSpec_ = builderForValue.build();
          onChanged();
        } else {
          workSpecBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public Builder mergeWorkSpec(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary value) {
        if (workSpecBuilder_ == null) {
          // Merge into the existing value only if one was already set and is
          // not the shared default instance; otherwise just adopt `value`.
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              workSpec_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance()) {
            workSpec_ =
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.newBuilder(workSpec_).mergeFrom(value).buildPartial();
          } else {
            workSpec_ = value;
          }
          onChanged();
        } else {
          workSpecBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public Builder clearWorkSpec() {
        if (workSpecBuilder_ == null) {
          workSpec_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.getDefaultInstance();
          onChanged();
        } else {
          workSpecBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder getWorkSpecBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getWorkSpecFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder getWorkSpecOrBuilder() {
        if (workSpecBuilder_ != null) {
          return workSpecBuilder_.getMessageOrBuilder();
        } else {
          return workSpec_;
        }
      }
      /**
       * <code>optional .VertexOrBinary work_spec = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder>
          getWorkSpecFieldBuilder() {
        // Lazily creates the sub-builder; ownership of the current value
        // transfers to it and workSpec_ is nulled out.
        if (workSpecBuilder_ == null) {
          workSpecBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinaryOrBuilder>(
                  workSpec_,
                  getParentForChildren(),
                  isClean());
          workSpec_ = null;
        }
        return workSpecBuilder_;
      }
| |
      // optional bytes work_spec_signature = 2; presence bit 0x00000002.
      private com.google.protobuf.ByteString workSpecSignature_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes work_spec_signature = 2;</code>
       */
      public boolean hasWorkSpecSignature() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes work_spec_signature = 2;</code>
       */
      public com.google.protobuf.ByteString getWorkSpecSignature() {
        return workSpecSignature_;
      }
      /**
       * <code>optional bytes work_spec_signature = 2;</code>
       */
      public Builder setWorkSpecSignature(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000002;
        workSpecSignature_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes work_spec_signature = 2;</code>
       */
      public Builder clearWorkSpecSignature() {
        bitField0_ = (bitField0_ & ~0x00000002);
        workSpecSignature_ = getDefaultInstance().getWorkSpecSignature();
        onChanged();
        return this;
      }
| |
      // optional int32 fragment_number = 3; presence bit 0x00000004.
      private int fragmentNumber_ ;
      /**
       * <code>optional int32 fragment_number = 3;</code>
       */
      public boolean hasFragmentNumber() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional int32 fragment_number = 3;</code>
       */
      public int getFragmentNumber() {
        return fragmentNumber_;
      }
      /**
       * <code>optional int32 fragment_number = 3;</code>
       */
      public Builder setFragmentNumber(int value) {
        bitField0_ |= 0x00000004;
        fragmentNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 fragment_number = 3;</code>
       */
      public Builder clearFragmentNumber() {
        bitField0_ = (bitField0_ & ~0x00000004);
        fragmentNumber_ = 0;
        onChanged();
        return this;
      }
| |
      // optional int32 attempt_number = 4; presence bit 0x00000008.
      private int attemptNumber_ ;
      /**
       * <code>optional int32 attempt_number = 4;</code>
       */
      public boolean hasAttemptNumber() {
        return ((bitField0_ & 0x00000008) == 0x00000008);
      }
      /**
       * <code>optional int32 attempt_number = 4;</code>
       */
      public int getAttemptNumber() {
        return attemptNumber_;
      }
      /**
       * <code>optional int32 attempt_number = 4;</code>
       */
      public Builder setAttemptNumber(int value) {
        bitField0_ |= 0x00000008;
        attemptNumber_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 attempt_number = 4;</code>
       */
      public Builder clearAttemptNumber() {
        bitField0_ = (bitField0_ & ~0x00000008);
        attemptNumber_ = 0;
        onChanged();
        return this;
      }
| |
      // optional string container_id_string = 5; presence bit 0x00000010.
      // Stored as Object: either a String or a ByteString. The getters
      // convert lazily and cache the converted form back into the field.
      private java.lang.Object containerIdString_ = "";
      /**
       * <code>optional string container_id_string = 5;</code>
       */
      public boolean hasContainerIdString() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      /**
       * <code>optional string container_id_string = 5;</code>
       */
      public java.lang.String getContainerIdString() {
        java.lang.Object ref = containerIdString_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the ByteString once and cache the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          containerIdString_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string container_id_string = 5;</code>
       */
      public com.google.protobuf.ByteString
          getContainerIdStringBytes() {
        java.lang.Object ref = containerIdString_;
        if (ref instanceof String) {
          // Encode the String once and cache the ByteString.
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          containerIdString_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string container_id_string = 5;</code>
       */
      public Builder setContainerIdString(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        containerIdString_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string container_id_string = 5;</code>
       */
      public Builder clearContainerIdString() {
        bitField0_ = (bitField0_ & ~0x00000010);
        containerIdString_ = getDefaultInstance().getContainerIdString();
        onChanged();
        return this;
      }
      /**
       * <code>optional string container_id_string = 5;</code>
       */
      public Builder setContainerIdStringBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000010;
        containerIdString_ = value;
        onChanged();
        return this;
      }
| |
      // optional string am_host = 6; presence bit 0x00000020.
      // Stored as Object: either a String or a ByteString, converted lazily
      // and cached, like containerIdString_.
      private java.lang.Object amHost_ = "";
      /**
       * <code>optional string am_host = 6;</code>
       */
      public boolean hasAmHost() {
        return ((bitField0_ & 0x00000020) == 0x00000020);
      }
      /**
       * <code>optional string am_host = 6;</code>
       */
      public java.lang.String getAmHost() {
        java.lang.Object ref = amHost_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          amHost_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string am_host = 6;</code>
       */
      public com.google.protobuf.ByteString
          getAmHostBytes() {
        java.lang.Object ref = amHost_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          amHost_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string am_host = 6;</code>
       */
      public Builder setAmHost(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        amHost_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string am_host = 6;</code>
       */
      public Builder clearAmHost() {
        bitField0_ = (bitField0_ & ~0x00000020);
        amHost_ = getDefaultInstance().getAmHost();
        onChanged();
        return this;
      }
      /**
       * <code>optional string am_host = 6;</code>
       */
      public Builder setAmHostBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000020;
        amHost_ = value;
        onChanged();
        return this;
      }
| |
      // optional int32 am_port = 7; presence bit 0x00000040.
      private int amPort_ ;
      /**
       * <code>optional int32 am_port = 7;</code>
       */
      public boolean hasAmPort() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      /**
       * <code>optional int32 am_port = 7;</code>
       */
      public int getAmPort() {
        return amPort_;
      }
      /**
       * <code>optional int32 am_port = 7;</code>
       */
      public Builder setAmPort(int value) {
        bitField0_ |= 0x00000040;
        amPort_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 am_port = 7;</code>
       */
      public Builder clearAmPort() {
        bitField0_ = (bitField0_ & ~0x00000040);
        amPort_ = 0;
        onChanged();
        return this;
      }
| |
      // optional bytes credentials_binary = 8; presence bit 0x00000080.
      private com.google.protobuf.ByteString credentialsBinary_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes credentials_binary = 8;</code>
       *
       * <pre>
       * Credentials are not signed - the client can add e.g. his own HDFS tokens.
       * </pre>
       */
      public boolean hasCredentialsBinary() {
        return ((bitField0_ & 0x00000080) == 0x00000080);
      }
      /**
       * <code>optional bytes credentials_binary = 8;</code>
       *
       * <pre>
       * Credentials are not signed - the client can add e.g. his own HDFS tokens.
       * </pre>
       */
      public com.google.protobuf.ByteString getCredentialsBinary() {
        return credentialsBinary_;
      }
      /**
       * <code>optional bytes credentials_binary = 8;</code>
       *
       * <pre>
       * Credentials are not signed - the client can add e.g. his own HDFS tokens.
       * </pre>
       */
      public Builder setCredentialsBinary(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000080;
        credentialsBinary_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes credentials_binary = 8;</code>
       *
       * <pre>
       * Credentials are not signed - the client can add e.g. his own HDFS tokens.
       * </pre>
       */
      public Builder clearCredentialsBinary() {
        bitField0_ = (bitField0_ & ~0x00000080);
        credentialsBinary_ = getDefaultInstance().getCredentialsBinary();
        onChanged();
        return this;
      }
| |
      // optional .FragmentRuntimeInfo fragment_runtime_info = 9; presence bit
      // 0x00000100. Value lives in fragmentRuntimeInfo_ until the lazy
      // sub-builder (fragmentRuntimeInfoBuilder_) is created, then the
      // builder owns it.
      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder> fragmentRuntimeInfoBuilder_;
      /**
       * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
       *
       * <pre>
       * Not supported/honored for external clients right now.
       * </pre>
       */
      public boolean hasFragmentRuntimeInfo() {
        return ((bitField0_ & 0x00000100) == 0x00000100);
      }
      /**
       * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
       *
       * <pre>
       * Not supported/honored for external clients right now.
       * </pre>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo getFragmentRuntimeInfo() {
        if (fragmentRuntimeInfoBuilder_ == null) {
          return fragmentRuntimeInfo_;
        } else {
          return fragmentRuntimeInfoBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
       *
       * <pre>
       * Not supported/honored for external clients right now.
       * </pre>
       */
      public Builder setFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo value) {
        if (fragmentRuntimeInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          fragmentRuntimeInfo_ = value;
          onChanged();
        } else {
          fragmentRuntimeInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000100;
        return this;
      }
      /**
       * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code>
       *
       * <pre>
       * Not supported/honored for external clients right now.
       * </pre>
       */
      public Builder setFragmentRuntimeInfo(
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder builderForValue) {
        if (fragmentRuntimeInfoBuilder_ == null) {
          fragmentRuntimeInfo_ = builderForValue.build();
          onChanged();
        } else {
          fragmentRuntimeInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000100;
        return this;
      }
| /** |
| * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code> |
| * |
| * <pre> |
| * Not supported/honored for external clients right now. |
| * </pre> |
| */ |
| public Builder mergeFragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo value) { |
| if (fragmentRuntimeInfoBuilder_ == null) { |
| if (((bitField0_ & 0x00000100) == 0x00000100) && |
| fragmentRuntimeInfo_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance()) { |
| fragmentRuntimeInfo_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.newBuilder(fragmentRuntimeInfo_).mergeFrom(value).buildPartial(); |
| } else { |
| fragmentRuntimeInfo_ = value; |
| } |
| onChanged(); |
| } else { |
| fragmentRuntimeInfoBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000100; |
| return this; |
| } |
| /** |
| * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code> |
| * |
| * <pre> |
| * Not supported/honored for external clients right now. |
| * </pre> |
| */ |
| public Builder clearFragmentRuntimeInfo() { |
| if (fragmentRuntimeInfoBuilder_ == null) { |
| fragmentRuntimeInfo_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.getDefaultInstance(); |
| onChanged(); |
| } else { |
| fragmentRuntimeInfoBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000100); |
| return this; |
| } |
| /** |
| * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code> |
| * |
| * <pre> |
| * Not supported/honored for external clients right now. |
| * </pre> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder getFragmentRuntimeInfoBuilder() { |
| bitField0_ |= 0x00000100; |
| onChanged(); |
| return getFragmentRuntimeInfoFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code> |
| * |
| * <pre> |
| * Not supported/honored for external clients right now. |
| * </pre> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder getFragmentRuntimeInfoOrBuilder() { |
| if (fragmentRuntimeInfoBuilder_ != null) { |
| return fragmentRuntimeInfoBuilder_.getMessageOrBuilder(); |
| } else { |
| return fragmentRuntimeInfo_; |
| } |
| } |
| /** |
| * <code>optional .FragmentRuntimeInfo fragment_runtime_info = 9;</code> |
| * |
| * <pre> |
| * Not supported/honored for external clients right now. |
| * </pre> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder> |
| getFragmentRuntimeInfoFieldBuilder() { |
| if (fragmentRuntimeInfoBuilder_ == null) { |
| fragmentRuntimeInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfoOrBuilder>( |
| fragmentRuntimeInfo_, |
| getParentForChildren(), |
| isClean()); |
| fragmentRuntimeInfo_ = null; |
| } |
| return fragmentRuntimeInfoBuilder_; |
| } |
| |
      // optional bytes initial_event_bytes = 10;
      private com.google.protobuf.ByteString initialEventBytes_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes initial_event_bytes = 10;</code>
       *
       * <pre>
       * Serialized (and signed) NotTezEvent; used only for external clients for now.
       * </pre>
       */
      public boolean hasInitialEventBytes() {
        // Bit 0x00000200 tracks whether field 10 was explicitly set.
        return ((bitField0_ & 0x00000200) == 0x00000200);
      }
      /**
       * <code>optional bytes initial_event_bytes = 10;</code>
       *
       * <pre>
       * Serialized (and signed) NotTezEvent; used only for external clients for now.
       * </pre>
       */
      public com.google.protobuf.ByteString getInitialEventBytes() {
        return initialEventBytes_;
      }
      /**
       * <code>optional bytes initial_event_bytes = 10;</code>
       *
       * <pre>
       * Serialized (and signed) NotTezEvent; used only for external clients for now.
       * </pre>
       */
      public Builder setInitialEventBytes(com.google.protobuf.ByteString value) {
        if (value == null) {
    // Null is rejected; use clearInitialEventBytes() to unset the field.
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000200;
        initialEventBytes_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes initial_event_bytes = 10;</code>
       *
       * <pre>
       * Serialized (and signed) NotTezEvent; used only for external clients for now.
       * </pre>
       */
      public Builder clearInitialEventBytes() {
        // Drop the has-bit and restore the default (empty) byte string.
        bitField0_ = (bitField0_ & ~0x00000200);
        initialEventBytes_ = getDefaultInstance().getInitialEventBytes();
        onChanged();
        return this;
      }
| |
      // optional bytes initial_event_signature = 11;
      private com.google.protobuf.ByteString initialEventSignature_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes initial_event_signature = 11;</code>
       */
      public boolean hasInitialEventSignature() {
        // Bit 0x00000400 tracks whether field 11 was explicitly set.
        return ((bitField0_ & 0x00000400) == 0x00000400);
      }
      /**
       * <code>optional bytes initial_event_signature = 11;</code>
       */
      public com.google.protobuf.ByteString getInitialEventSignature() {
        return initialEventSignature_;
      }
      /**
       * <code>optional bytes initial_event_signature = 11;</code>
       */
      public Builder setInitialEventSignature(com.google.protobuf.ByteString value) {
        if (value == null) {
    // Null is rejected; use clearInitialEventSignature() to unset the field.
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000400;
        initialEventSignature_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes initial_event_signature = 11;</code>
       */
      public Builder clearInitialEventSignature() {
        // Drop the has-bit and restore the default (empty) byte string.
        bitField0_ = (bitField0_ & ~0x00000400);
        initialEventSignature_ = getDefaultInstance().getInitialEventSignature();
        onChanged();
        return this;
      }
| |
| // @@protoc_insertion_point(builder_scope:SubmitWorkRequestProto) |
| } |
| |
    static {
      // Eagerly build the shared singleton default instance for SubmitWorkRequestProto.
      defaultInstance = new SubmitWorkRequestProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:SubmitWorkRequestProto) |
| } |
| |
  // Read-only view shared by SubmitWorkResponseProto and its Builder.
  public interface SubmitWorkResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .SubmissionStateProto submission_state = 1;
    /**
     * <code>optional .SubmissionStateProto submission_state = 1;</code>
     */
    boolean hasSubmissionState();
    /**
     * <code>optional .SubmissionStateProto submission_state = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState();
  }
| /** |
| * Protobuf type {@code SubmitWorkResponseProto} |
| */ |
| public static final class SubmitWorkResponseProto extends |
| com.google.protobuf.GeneratedMessage |
| implements SubmitWorkResponseProtoOrBuilder { |
| // Use SubmitWorkResponseProto.newBuilder() to construct. |
| private SubmitWorkResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private SubmitWorkResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final SubmitWorkResponseProto defaultInstance; |
| public static SubmitWorkResponseProto getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public SubmitWorkResponseProto getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private SubmitWorkResponseProto( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 8: { |
| int rawValue = input.readEnum(); |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto value = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.valueOf(rawValue); |
| if (value == null) { |
| unknownFields.mergeVarintField(1, rawValue); |
| } else { |
| bitField0_ |= 0x00000001; |
| submissionState_ = value; |
| } |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<SubmitWorkResponseProto> PARSER = |
| new com.google.protobuf.AbstractParser<SubmitWorkResponseProto>() { |
| public SubmitWorkResponseProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new SubmitWorkResponseProto(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<SubmitWorkResponseProto> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // optional .SubmissionStateProto submission_state = 1; |
| public static final int SUBMISSION_STATE_FIELD_NUMBER = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto submissionState_; |
| /** |
| * <code>optional .SubmissionStateProto submission_state = 1;</code> |
| */ |
| public boolean hasSubmissionState() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .SubmissionStateProto submission_state = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() { |
| return submissionState_; |
| } |
| |
| private void initFields() { |
| submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeEnum(1, submissionState_.getNumber()); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeEnumSize(1, submissionState_.getNumber()); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) obj; |
| |
| boolean result = true; |
| result = result && (hasSubmissionState() == other.hasSubmissionState()); |
| if (hasSubmissionState()) { |
| result = result && |
| (getSubmissionState() == other.getSubmissionState()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasSubmissionState()) { |
| hash = (37 * hash) + SUBMISSION_STATE_FIELD_NUMBER; |
| hash = (53 * hash) + hashEnum(getSubmissionState()); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
| public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto prototype) { |
| return newBuilder().mergeFrom(prototype); |
| } |
| public Builder toBuilder() { return newBuilder(this); } |
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code SubmitWorkResponseProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SubmitWorkResponseProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.submissionState_ = submissionState_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance()) return this; |
| if (other.hasSubmissionState()) { |
| setSubmissionState(other.getSubmissionState()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional .SubmissionStateProto submission_state = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED; |
| /** |
| * <code>optional .SubmissionStateProto submission_state = 1;</code> |
| */ |
| public boolean hasSubmissionState() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .SubmissionStateProto submission_state = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto getSubmissionState() { |
| return submissionState_; |
| } |
| /** |
| * <code>optional .SubmissionStateProto submission_state = 1;</code> |
| */ |
| public Builder setSubmissionState(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| submissionState_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional .SubmissionStateProto submission_state = 1;</code> |
| */ |
| public Builder clearSubmissionState() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| submissionState_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmissionStateProto.ACCEPTED; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:SubmitWorkResponseProto) |
| } |
| |
| static { |
| defaultInstance = new SubmitWorkResponseProto(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:SubmitWorkResponseProto) |
| } |
| |
  // Read-only view shared by SourceStateUpdatedRequestProto and its Builder.
  public interface SourceStateUpdatedRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .QueryIdentifierProto query_identifier = 1;
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    boolean hasQueryIdentifier();
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();

    // optional string src_name = 2;
    /**
     * <code>optional string src_name = 2;</code>
     */
    boolean hasSrcName();
    /**
     * <code>optional string src_name = 2;</code>
     */
    java.lang.String getSrcName();
    /**
     * <code>optional string src_name = 2;</code>
     */
    com.google.protobuf.ByteString
        getSrcNameBytes();

    // optional .SourceStateProto state = 3;
    /**
     * <code>optional .SourceStateProto state = 3;</code>
     */
    boolean hasState();
    /**
     * <code>optional .SourceStateProto state = 3;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState();
  }
| /** |
| * Protobuf type {@code SourceStateUpdatedRequestProto} |
| */ |
| public static final class SourceStateUpdatedRequestProto extends |
| com.google.protobuf.GeneratedMessage |
| implements SourceStateUpdatedRequestProtoOrBuilder { |
    // Use SourceStateUpdatedRequestProto.newBuilder() to construct.
    private SourceStateUpdatedRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor used only for the singleton default instance.
    private SourceStateUpdatedRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final SourceStateUpdatedRequestProto defaultInstance;
    public static SourceStateUpdatedRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public SourceStateUpdatedRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor; invoked by PARSER below.
    private SourceStateUpdatedRequestProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // Unrecognized tag: stash it in unknownFields (or stop if unparseable).
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (query_identifier), length-delimited message; merge with
              // any previously-seen value per proto2 last-message-wins-merge rules.
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = queryIdentifier_.toBuilder();
              }
              queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(queryIdentifier_);
                queryIdentifier_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (src_name), stored as raw bytes; decoded to String lazily.
              bitField0_ |= 0x00000002;
              srcName_ = input.readBytes();
              break;
            }
            case 24: {
              // Field 3 (state), varint-encoded enum.
              int rawValue = input.readEnum();
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto value = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.valueOf(rawValue);
              if (value == null) {
                // Unknown enum number: preserved in unknownFields rather than dropped.
                unknownFields.mergeVarintField(3, rawValue);
              } else {
                bitField0_ |= 0x00000004;
                state_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.Builder.class);
    }

    // Parser singleton delegating to the wire-format constructor above.
    public static com.google.protobuf.Parser<SourceStateUpdatedRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<SourceStateUpdatedRequestProto>() {
      public SourceStateUpdatedRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SourceStateUpdatedRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SourceStateUpdatedRequestProto> getParserForType() {
      return PARSER;
    }
| |
    // Presence bits: 0x1 = query_identifier, 0x2 = src_name, 0x4 = state.
    private int bitField0_;
    // optional .QueryIdentifierProto query_identifier = 1;
    public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    public boolean hasQueryIdentifier() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
      return queryIdentifier_;
    }
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
      return queryIdentifier_;
    }
| |
    // optional string src_name = 2;
    public static final int SRC_NAME_FIELD_NUMBER = 2;
    // Holds either a java.lang.String or a com.google.protobuf.ByteString;
    // the accessors below convert lazily and cache the converted form.
    private java.lang.Object srcName_;
    /**
     * <code>optional string src_name = 2;</code>
     * Returns true iff the field was explicitly set (presence bit 0x2).
     */
    public boolean hasSrcName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional string src_name = 2;</code>
     * Decodes the cached ByteString to a String on first call; the decoded
     * String is cached back only when the bytes are valid UTF-8.
     */
    public java.lang.String getSrcName() {
      java.lang.Object ref = srcName_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          srcName_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string src_name = 2;</code>
     * Returns the field as UTF-8 bytes; encodes and caches the ByteString
     * form if a String is currently held.
     */
    public com.google.protobuf.ByteString
        getSrcNameBytes() {
      java.lang.Object ref = srcName_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        srcName_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // optional .SourceStateProto state = 3;
    public static final int STATE_FIELD_NUMBER = 3;
    // Never null once initFields() has run; defaults to S_SUCCEEDED when unset.
    private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto state_;
    /**
     * <code>optional .SourceStateProto state = 3;</code>
     * Returns true iff the field was explicitly set (presence bit 0x4).
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .SourceStateProto state = 3;</code>
     * Returns the field value, or S_SUCCEEDED (the enum default) when unset.
     */
    public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() {
      return state_;
    }
| |
    // Assigns the proto-declared default to every field so getters never
    // return null, even for unset optional fields.
    private void initFields() {
      queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
      srcName_ = "";
      state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED;
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // Always true for this message: all three fields are optional, so there
    // are no required fields to validate.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes the set fields (in field-number order) followed by any
    // unknown fields preserved from parsing.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Standard generated-code idiom: computing the size first memoizes the
      // serialized sizes of nested messages before they are written.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, queryIdentifier_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getSrcNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeEnum(3, state_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    // Computes (once) and returns the serialized byte size, counting only
    // fields whose presence bit is set, plus unknown fields.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, queryIdentifier_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getSrcNameBytes());
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(3, state_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage, which substitutes a
    // protobuf-encoded proxy object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Structural equality: two messages are equal when each field has the same
    // presence and (when present) the same value, and unknown fields match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) obj;

      boolean result = true;
      result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
      if (hasQueryIdentifier()) {
        result = result && getQueryIdentifier()
            .equals(other.getQueryIdentifier());
      }
      result = result && (hasSrcName() == other.hasSrcName());
      if (hasSrcName()) {
        result = result && getSrcName()
            .equals(other.getSrcName());
      }
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
| |
    // Memoized hash; 0 acts as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    // Hash mixes the descriptor, each present field (tagged by its field
    // number), and the unknown-field set; consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasQueryIdentifier()) {
        hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getQueryIdentifier().hashCode();
      }
      if (hasSrcName()) {
        hash = (37 * hash) + SRC_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getSrcName().hashCode();
      }
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        // hashEnum is a helper on the enclosing outer class (hashes by enum number).
        hash = (53 * hash) + hashEnum(getState());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // --- Static parse entry points. All delegate to PARSER; the byte[] /
    // ByteString overloads throw InvalidProtocolBufferException on malformed
    // input, the stream overloads throw IOException. parseDelimitedFrom reads
    // a varint length prefix before the message body. ---
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Creates a fresh, empty builder for this message type.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Creates a builder pre-populated with the given message's set fields.
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    // Creates a builder pre-populated with this message's set fields.
    public Builder toBuilder() { return newBuilder(this); }

    // Runtime hook used when this message is built as a child of another builder.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code SourceStateUpdatedRequestProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| getQueryIdentifierFieldBuilder(); |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| } else { |
| queryIdentifierBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000001); |
| srcName_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED; |
| bitField0_ = (bitField0_ & ~0x00000004); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedRequestProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| if (queryIdentifierBuilder_ == null) { |
| result.queryIdentifier_ = queryIdentifier_; |
| } else { |
| result.queryIdentifier_ = queryIdentifierBuilder_.build(); |
| } |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.srcName_ = srcName_; |
| if (((from_bitField0_ & 0x00000004) == 0x00000004)) { |
| to_bitField0_ |= 0x00000004; |
| } |
| result.state_ = state_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance()) return this; |
| if (other.hasQueryIdentifier()) { |
| mergeQueryIdentifier(other.getQueryIdentifier()); |
| } |
| if (other.hasSrcName()) { |
| bitField0_ |= 0x00000002; |
| srcName_ = other.srcName_; |
| onChanged(); |
| } |
| if (other.hasState()) { |
| setState(other.getState()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional .QueryIdentifierProto query_identifier = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_; |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public boolean hasQueryIdentifier() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() { |
| if (queryIdentifierBuilder_ == null) { |
| return queryIdentifier_; |
| } else { |
| return queryIdentifierBuilder_.getMessage(); |
| } |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) { |
| if (queryIdentifierBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| queryIdentifier_ = value; |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.setMessage(value); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder setQueryIdentifier( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) { |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifier_ = builderForValue.build(); |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.setMessage(builderForValue.build()); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) { |
| if (queryIdentifierBuilder_ == null) { |
| if (((bitField0_ & 0x00000001) == 0x00000001) && |
| queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) { |
| queryIdentifier_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial(); |
| } else { |
| queryIdentifier_ = value; |
| } |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder clearQueryIdentifier() { |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000001); |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder getQueryIdentifierBuilder() { |
| bitField0_ |= 0x00000001; |
| onChanged(); |
| return getQueryIdentifierFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() { |
| if (queryIdentifierBuilder_ != null) { |
| return queryIdentifierBuilder_.getMessageOrBuilder(); |
| } else { |
| return queryIdentifier_; |
| } |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> |
| getQueryIdentifierFieldBuilder() { |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>( |
| queryIdentifier_, |
| getParentForChildren(), |
| isClean()); |
| queryIdentifier_ = null; |
| } |
| return queryIdentifierBuilder_; |
| } |
| |
| // optional string src_name = 2; |
| private java.lang.Object srcName_ = ""; |
| /** |
| * <code>optional string src_name = 2;</code> |
| */ |
| public boolean hasSrcName() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional string src_name = 2;</code> |
| */ |
| public java.lang.String getSrcName() { |
| java.lang.Object ref = srcName_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| srcName_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>optional string src_name = 2;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getSrcNameBytes() { |
| java.lang.Object ref = srcName_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| srcName_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>optional string src_name = 2;</code> |
| */ |
| public Builder setSrcName( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000002; |
| srcName_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string src_name = 2;</code> |
| */ |
| public Builder clearSrcName() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| srcName_ = getDefaultInstance().getSrcName(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string src_name = 2;</code> |
| */ |
| public Builder setSrcNameBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000002; |
| srcName_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // optional .SourceStateProto state = 3; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED; |
| /** |
| * <code>optional .SourceStateProto state = 3;</code> |
| */ |
| public boolean hasState() { |
| return ((bitField0_ & 0x00000004) == 0x00000004); |
| } |
| /** |
| * <code>optional .SourceStateProto state = 3;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto getState() { |
| return state_; |
| } |
| /** |
| * <code>optional .SourceStateProto state = 3;</code> |
| */ |
| public Builder setState(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000004; |
| state_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional .SourceStateProto state = 3;</code> |
| */ |
| public Builder clearState() { |
| bitField0_ = (bitField0_ & ~0x00000004); |
| state_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateProto.S_SUCCEEDED; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:SourceStateUpdatedRequestProto) |
| } |
| |
    // Eagerly builds the shared default instance ("noInit" constructor skips
    // stream parsing) and populates its field defaults.
    static {
      defaultInstance = new SourceStateUpdatedRequestProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:SourceStateUpdatedRequestProto) |
| } |
| |
  // Read-only view shared by SourceStateUpdatedResponseProto and its Builder.
  // Empty because the response message declares no fields.
  public interface SourceStateUpdatedResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
| /** |
| * Protobuf type {@code SourceStateUpdatedResponseProto} |
| */ |
| public static final class SourceStateUpdatedResponseProto extends |
| com.google.protobuf.GeneratedMessage |
| implements SourceStateUpdatedResponseProtoOrBuilder { |
    // Use SourceStateUpdatedResponseProto.newBuilder() to construct.
    private SourceStateUpdatedResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // "noInit" constructor used only for the shared default instance; installs
    // an empty unknown-field set without touching any parsing machinery.
    private SourceStateUpdatedResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created in the class's static initializer.
    private static final SourceStateUpdatedResponseProto defaultInstance;
    public static SourceStateUpdatedResponseProto getDefaultInstance() {
      return defaultInstance;
    }

    public SourceStateUpdatedResponseProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields not recognized at parse time, preserved for re-serialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: reads tags until EOF (tag 0). The message declares
    // no fields, so everything lands in the unknown-field set, which is built
    // in the finally block even when parsing fails partway.
    private SourceStateUpdatedResponseProto(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-built message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Returns the protobuf descriptor for the SourceStateUpdatedResponseProto
    // message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor;
    }

    // Reflection table for this message and its Builder; lazily initialized.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.Builder.class);
    }

    // Parser used by all parseFrom() overloads; delegates to the parsing
    // constructor. NOTE(review): public non-final, per protoc 2.5 output;
    // do not reassign.
    public static com.google.protobuf.Parser<SourceStateUpdatedResponseProto> PARSER =
        new com.google.protobuf.AbstractParser<SourceStateUpdatedResponseProto>() {
      public SourceStateUpdatedResponseProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SourceStateUpdatedResponseProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<SourceStateUpdatedResponseProto> getParserForType() {
      return PARSER;
    }
| |
    // No fields to default-initialize for this empty message.
    private void initFields() {
    }
    // Memoized isInitialized() result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // Always true: the message has no required fields.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serialization consists solely of any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size: -1 = not yet computed.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to GeneratedMessage's protobuf proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // With no declared fields, equality reduces to comparing unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash; 0 is the "not yet computed" sentinel. Hashes the
    // descriptor and the unknown-field set; consistent with equals() above.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // --- Static parse entry points; all delegate to PARSER. byte[]/ByteString
    // overloads throw InvalidProtocolBufferException on malformed input; stream
    // overloads throw IOException; parseDelimitedFrom reads a varint length
    // prefix first. ---
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| } |
| |
    // --- Builder factories ---------------------------------------------------
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Seeds a fresh Builder with the contents of an existing message instance.
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: creates a Builder attached to a parent so nested-builder
    // changes propagate upward (used by GeneratedMessage internals).
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code SourceStateUpdatedResponseProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_SourceStateUpdatedResponseProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto(this); |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance()) return this; |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:SourceStateUpdatedResponseProto) |
| } |
| |
    // Eagerly create the singleton returned by getDefaultInstance()/
    // getDefaultInstanceForType(); the (true) ctor skips normal field setup,
    // so initFields() is invoked explicitly afterwards.
    static {
      defaultInstance = new SourceStateUpdatedResponseProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:SourceStateUpdatedResponseProto) |
| } |
| |
  /**
   * Read-side accessors shared by {@code QueryCompleteRequestProto} and its
   * Builder: presence checks plus getters for the two declared fields.
   */
  public interface QueryCompleteRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .QueryIdentifierProto query_identifier = 1;
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    boolean hasQueryIdentifier();
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
    /**
     * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
     */
    org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();

    // optional int64 delete_delay = 2 [default = 0];
    /**
     * <code>optional int64 delete_delay = 2 [default = 0];</code>
     */
    boolean hasDeleteDelay();
    /**
     * <code>optional int64 delete_delay = 2 [default = 0];</code>
     */
    long getDeleteDelay();
  }
| /** |
| * Protobuf type {@code QueryCompleteRequestProto} |
| */ |
| public static final class QueryCompleteRequestProto extends |
| com.google.protobuf.GeneratedMessage |
| implements QueryCompleteRequestProtoOrBuilder { |
| // Use QueryCompleteRequestProto.newBuilder() to construct. |
| private QueryCompleteRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private QueryCompleteRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final QueryCompleteRequestProto defaultInstance; |
| public static QueryCompleteRequestProto getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public QueryCompleteRequestProto getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private QueryCompleteRequestProto( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 10: { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| subBuilder = queryIdentifier_.toBuilder(); |
| } |
| queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry); |
| if (subBuilder != null) { |
| subBuilder.mergeFrom(queryIdentifier_); |
| queryIdentifier_ = subBuilder.buildPartial(); |
| } |
| bitField0_ |= 0x00000001; |
| break; |
| } |
| case 16: { |
| bitField0_ |= 0x00000002; |
| deleteDelay_ = input.readInt64(); |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<QueryCompleteRequestProto> PARSER = |
| new com.google.protobuf.AbstractParser<QueryCompleteRequestProto>() { |
| public QueryCompleteRequestProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new QueryCompleteRequestProto(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<QueryCompleteRequestProto> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // optional .QueryIdentifierProto query_identifier = 1; |
| public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_; |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public boolean hasQueryIdentifier() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() { |
| return queryIdentifier_; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() { |
| return queryIdentifier_; |
| } |
| |
| // optional int64 delete_delay = 2 [default = 0]; |
| public static final int DELETE_DELAY_FIELD_NUMBER = 2; |
| private long deleteDelay_; |
| /** |
| * <code>optional int64 delete_delay = 2 [default = 0];</code> |
| */ |
| public boolean hasDeleteDelay() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int64 delete_delay = 2 [default = 0];</code> |
| */ |
| public long getDeleteDelay() { |
| return deleteDelay_; |
| } |
| |
| private void initFields() { |
| queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| deleteDelay_ = 0L; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeMessage(1, queryIdentifier_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| output.writeInt64(2, deleteDelay_); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeMessageSize(1, queryIdentifier_); |
| } |
| if (((bitField0_ & 0x00000002) == 0x00000002)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeInt64Size(2, deleteDelay_); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) obj; |
| |
| boolean result = true; |
| result = result && (hasQueryIdentifier() == other.hasQueryIdentifier()); |
| if (hasQueryIdentifier()) { |
| result = result && getQueryIdentifier() |
| .equals(other.getQueryIdentifier()); |
| } |
| result = result && (hasDeleteDelay() == other.hasDeleteDelay()); |
| if (hasDeleteDelay()) { |
| result = result && (getDeleteDelay() |
| == other.getDeleteDelay()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasQueryIdentifier()) { |
| hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER; |
| hash = (53 * hash) + getQueryIdentifier().hashCode(); |
| } |
| if (hasDeleteDelay()) { |
| hash = (37 * hash) + DELETE_DELAY_FIELD_NUMBER; |
| hash = (53 * hash) + hashLong(getDeleteDelay()); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
| public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto prototype) { |
| return newBuilder().mergeFrom(prototype); |
| } |
| public Builder toBuilder() { return newBuilder(this); } |
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code QueryCompleteRequestProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| getQueryIdentifierFieldBuilder(); |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| } else { |
| queryIdentifierBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000001); |
| deleteDelay_ = 0L; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteRequestProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| if (queryIdentifierBuilder_ == null) { |
| result.queryIdentifier_ = queryIdentifier_; |
| } else { |
| result.queryIdentifier_ = queryIdentifierBuilder_.build(); |
| } |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.deleteDelay_ = deleteDelay_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance()) return this; |
| if (other.hasQueryIdentifier()) { |
| mergeQueryIdentifier(other.getQueryIdentifier()); |
| } |
| if (other.hasDeleteDelay()) { |
| setDeleteDelay(other.getDeleteDelay()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional .QueryIdentifierProto query_identifier = 1; |
| private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_; |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public boolean hasQueryIdentifier() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() { |
| if (queryIdentifierBuilder_ == null) { |
| return queryIdentifier_; |
| } else { |
| return queryIdentifierBuilder_.getMessage(); |
| } |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) { |
| if (queryIdentifierBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| queryIdentifier_ = value; |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.setMessage(value); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder setQueryIdentifier( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) { |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifier_ = builderForValue.build(); |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.setMessage(builderForValue.build()); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) { |
| if (queryIdentifierBuilder_ == null) { |
| if (((bitField0_ & 0x00000001) == 0x00000001) && |
| queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) { |
| queryIdentifier_ = |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial(); |
| } else { |
| queryIdentifier_ = value; |
| } |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.mergeFrom(value); |
| } |
| bitField0_ |= 0x00000001; |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public Builder clearQueryIdentifier() { |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance(); |
| onChanged(); |
| } else { |
| queryIdentifierBuilder_.clear(); |
| } |
| bitField0_ = (bitField0_ & ~0x00000001); |
| return this; |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder getQueryIdentifierBuilder() { |
| bitField0_ |= 0x00000001; |
| onChanged(); |
| return getQueryIdentifierFieldBuilder().getBuilder(); |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() { |
| if (queryIdentifierBuilder_ != null) { |
| return queryIdentifierBuilder_.getMessageOrBuilder(); |
| } else { |
| return queryIdentifier_; |
| } |
| } |
| /** |
| * <code>optional .QueryIdentifierProto query_identifier = 1;</code> |
| */ |
| private com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> |
| getQueryIdentifierFieldBuilder() { |
| if (queryIdentifierBuilder_ == null) { |
| queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder< |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>( |
| queryIdentifier_, |
| getParentForChildren(), |
| isClean()); |
| queryIdentifier_ = null; |
| } |
| return queryIdentifierBuilder_; |
| } |
| |
| // optional int64 delete_delay = 2 [default = 0]; |
| private long deleteDelay_ ; |
| /** |
| * <code>optional int64 delete_delay = 2 [default = 0];</code> |
| */ |
| public boolean hasDeleteDelay() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional int64 delete_delay = 2 [default = 0];</code> |
| */ |
| public long getDeleteDelay() { |
| return deleteDelay_; |
| } |
| /** |
| * <code>optional int64 delete_delay = 2 [default = 0];</code> |
| */ |
| public Builder setDeleteDelay(long value) { |
| bitField0_ |= 0x00000002; |
| deleteDelay_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional int64 delete_delay = 2 [default = 0];</code> |
| */ |
| public Builder clearDeleteDelay() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| deleteDelay_ = 0L; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:QueryCompleteRequestProto) |
| } |
| |
| static { |
| defaultInstance = new QueryCompleteRequestProto(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:QueryCompleteRequestProto) |
| } |
| |
  /**
   * Read-side accessor interface for {@code QueryCompleteResponseProto}.
   * The message declares no fields, so only the inherited
   * {@code MessageOrBuilder} contract applies.
   */
  public interface QueryCompleteResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
| /** |
| * Protobuf type {@code QueryCompleteResponseProto} |
| */ |
public static final class QueryCompleteResponseProto extends
com.google.protobuf.GeneratedMessage
implements QueryCompleteResponseProtoOrBuilder {
// NOTE(review): field-less ack message for the queryComplete RPC.
// Generated by protoc — do not hand-edit; fix the .proto and regenerate.
// Use QueryCompleteResponseProto.newBuilder() to construct.
private QueryCompleteResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the static defaultInstance singleton; installs an empty
// UnknownFieldSet so the instance is fully immutable.
private QueryCompleteResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final QueryCompleteResponseProto defaultInstance;
public static QueryCompleteResponseProto getDefaultInstance() {
return defaultInstance;
}

public QueryCompleteResponseProto getDefaultInstanceForType() {
return defaultInstance;
}

private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor. The message declares no fields, so every
// non-zero tag is preserved via parseUnknownField; tag 0 means end of input.
private QueryCompleteResponseProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always retain whatever unknown fields were read, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.Builder.class);
}

// NOTE(review): the generator emits PARSER as a mutable public static field;
// treat it as effectively final — never reassign it.
public static com.google.protobuf.Parser<QueryCompleteResponseProto> PARSER =
new com.google.protobuf.AbstractParser<QueryCompleteResponseProto>() {
public QueryCompleteResponseProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new QueryCompleteResponseProto(input, extensionRegistry);
}
};

@java.lang.Override
public com.google.protobuf.Parser<QueryCompleteResponseProto> getParserForType() {
return PARSER;
}

// No declared fields to initialize.
private void initFields() {
}
// Memoized tri-state: -1 = not computed, 1 = initialized, 0 = not.
// Always ends up 1 here since there are no required fields.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;

memoizedIsInitialized = 1;
return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
getUnknownFields().writeTo(output);
}

// Cached serialized size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

size = 0;
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}

// Equality: correct runtime type plus equal unknown fields (no declared fields).
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto)) {
return super.equals(obj);
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) obj;

boolean result = true;
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}

// Memoized hash over descriptor type and unknown fields; 0 doubles as the
// "not yet computed" sentinel.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}

// Static parse helpers; all delegate to PARSER.
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}

// Builder factory helpers.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code QueryCompleteResponseProto}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProtoOrBuilder {
// Builder for the field-less response message; only unknown fields can vary.
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.Builder.class);
}

// Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}

private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// No nested-message builders to eagerly initialize for this message.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}

public Builder clear() {
super.clear();
return this;
}

public Builder clone() {
return create().mergeFrom(buildPartial());
}

public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_QueryCompleteResponseProto_descriptor;
}

public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance();
}

public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}

public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto(this);
onBuilt();
return result;
}

public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto)other);
} else {
super.mergeFrom(other);
return this;
}
}

// Only unknown fields can be merged — the message has no declared fields.
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance()) return this;
this.mergeUnknownFields(other.getUnknownFields());
return this;
}

public final boolean isInitialized() {
return true;
}

// Stream merge: on parse error, still merge whatever was read before rethrowing.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}

// @@protoc_insertion_point(builder_scope:QueryCompleteResponseProto)
}

// Eagerly builds the immutable singleton returned by getDefaultInstance().
static {
defaultInstance = new QueryCompleteResponseProto(true);
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:QueryCompleteResponseProto)
}
| |
// Read-only view shared by TerminateFragmentRequestProto and its Builder.
// Generated code — change LlapDaemonProtocol.proto and regenerate.
public interface TerminateFragmentRequestProtoOrBuilder
extends com.google.protobuf.MessageOrBuilder {

// optional .QueryIdentifierProto query_identifier = 1;
/**
* <code>optional .QueryIdentifierProto query_identifier = 1;</code>
*/
boolean hasQueryIdentifier();
/**
* <code>optional .QueryIdentifierProto query_identifier = 1;</code>
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier();
/**
* <code>optional .QueryIdentifierProto query_identifier = 1;</code>
*/
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder();

// optional string fragment_identifier_string = 2;
/**
* <code>optional string fragment_identifier_string = 2;</code>
*/
boolean hasFragmentIdentifierString();
/**
* <code>optional string fragment_identifier_string = 2;</code>
*/
java.lang.String getFragmentIdentifierString();
/**
* <code>optional string fragment_identifier_string = 2;</code>
*/
com.google.protobuf.ByteString
getFragmentIdentifierStringBytes();
}
| /** |
| * Protobuf type {@code TerminateFragmentRequestProto} |
| */ |
| public static final class TerminateFragmentRequestProto extends |
| com.google.protobuf.GeneratedMessage |
| implements TerminateFragmentRequestProtoOrBuilder { |
// Use TerminateFragmentRequestProto.newBuilder() to construct.
private TerminateFragmentRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
// Used only for the static defaultInstance singleton; installs an empty
// UnknownFieldSet so the instance is fully immutable.
private TerminateFragmentRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
| |
// Singleton default instance, created in the class's static initializer.
private static final TerminateFragmentRequestProto defaultInstance;
public static TerminateFragmentRequestProto getDefaultInstance() {
return defaultInstance;
}

public TerminateFragmentRequestProto getDefaultInstanceForType() {
return defaultInstance;
}

// Fields with unrecognized tags, preserved for round-tripping.
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: tag 10 = query_identifier (message),
// tag 18 = fragment_identifier_string (bytes); tag 0 ends input; everything
// else goes to the unknown-field set.
private TerminateFragmentRequestProto(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
// NOTE: protoc emits the default label before cases 10/18; Java selects
// `default` only when no case matches, so this ordering is harmless.
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 10: {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder subBuilder = null;
// If the field repeats on the wire, merge into the previous value
// (standard protobuf "last message wins via merge" semantics).
if (((bitField0_ & 0x00000001) == 0x00000001)) {
subBuilder = queryIdentifier_.toBuilder();
}
queryIdentifier_ = input.readMessage(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.PARSER, extensionRegistry);
if (subBuilder != null) {
subBuilder.mergeFrom(queryIdentifier_);
queryIdentifier_ = subBuilder.buildPartial();
}
bitField0_ |= 0x00000001;
break;
}
case 18: {
bitField0_ |= 0x00000002;
// Stored as ByteString; converted lazily to String on first access.
fragmentIdentifierString_ = input.readBytes();
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
// Always retain whatever unknown fields were read, even on error paths.
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.Builder.class);
}

// NOTE(review): the generator emits PARSER as a mutable public static field;
// treat it as effectively final — never reassign it.
public static com.google.protobuf.Parser<TerminateFragmentRequestProto> PARSER =
new com.google.protobuf.AbstractParser<TerminateFragmentRequestProto>() {
public TerminateFragmentRequestProto parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new TerminateFragmentRequestProto(input, extensionRegistry);
}
};

@java.lang.Override
public com.google.protobuf.Parser<TerminateFragmentRequestProto> getParserForType() {
return PARSER;
}
| |
// Presence bits: 0x1 = query_identifier, 0x2 = fragment_identifier_string.
private int bitField0_;
// optional .QueryIdentifierProto query_identifier = 1;
public static final int QUERY_IDENTIFIER_FIELD_NUMBER = 1;
private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_;
/**
* <code>optional .QueryIdentifierProto query_identifier = 1;</code>
*/
public boolean hasQueryIdentifier() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>optional .QueryIdentifierProto query_identifier = 1;</code>
*/
// Never null: initFields() seeds the QueryIdentifierProto default instance.
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
return queryIdentifier_;
}
/**
* <code>optional .QueryIdentifierProto query_identifier = 1;</code>
*/
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
return queryIdentifier_;
}

// optional string fragment_identifier_string = 2;
public static final int FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER = 2;
// Holds either a java.lang.String or a ByteString; converted lazily in the
// accessors below and cached when safe.
private java.lang.Object fragmentIdentifierString_;
/**
* <code>optional string fragment_identifier_string = 2;</code>
*/
public boolean hasFragmentIdentifierString() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>optional string fragment_identifier_string = 2;</code>
*/
// Decodes the cached ByteString to a String; the String form is cached only
// when the bytes are valid UTF-8 (otherwise re-decoded on each call).
public java.lang.String getFragmentIdentifierString() {
java.lang.Object ref = fragmentIdentifierString_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
fragmentIdentifierString_ = s;
}
return s;
}
}
/**
* <code>optional string fragment_identifier_string = 2;</code>
*/
// Inverse of the above: encodes a cached String to UTF-8 bytes and caches it.
public com.google.protobuf.ByteString
getFragmentIdentifierStringBytes() {
java.lang.Object ref = fragmentIdentifierString_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
fragmentIdentifierString_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
| |
// Sets non-null defaults so accessors never return null even when unset.
private void initFields() {
queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
fragmentIdentifierString_ = "";
}
// Memoized tri-state: -1 = not computed, 1 = initialized. Always true here
// (both fields are optional).
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;

memoizedIsInitialized = 1;
return true;
}
| |
// Writes only the fields marked present, then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, queryIdentifier_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getFragmentIdentifierStringBytes());
}
getUnknownFields().writeTo(output);
}

// Cached serialized size; -1 means "not yet computed".
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, queryIdentifier_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getFragmentIdentifierStringBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
| |
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
| |
// Field-wise equality: presence bits must match, present values must be equal,
// and unknown fields must be equal.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto)) {
return super.equals(obj);
}
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) obj;

boolean result = true;
result = result && (hasQueryIdentifier() == other.hasQueryIdentifier());
if (hasQueryIdentifier()) {
result = result && getQueryIdentifier()
.equals(other.getQueryIdentifier());
}
result = result && (hasFragmentIdentifierString() == other.hasFragmentIdentifierString());
if (hasFragmentIdentifierString()) {
result = result && getFragmentIdentifierString()
.equals(other.getFragmentIdentifierString());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}
| |
// Memoized hash over descriptor type, present fields (keyed by field number),
// and unknown fields; 0 doubles as the "not yet computed" sentinel.
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasQueryIdentifier()) {
hash = (37 * hash) + QUERY_IDENTIFIER_FIELD_NUMBER;
hash = (53 * hash) + getQueryIdentifier().hashCode();
}
if (hasFragmentIdentifierString()) {
hash = (37 * hash) + FRAGMENT_IDENTIFIER_STRING_FIELD_NUMBER;
hash = (53 * hash) + getFragmentIdentifierString().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
| |
// Static parse helpers; all delegate to PARSER.
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
| |
// Builder factory helpers.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
| /** |
| * Protobuf type {@code TerminateFragmentRequestProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProtoOrBuilder { |
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_fieldAccessorTable
.ensureFieldAccessorsInitialized(
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.Builder.class);
}

// Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}

private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested query_identifier field builder when the runtime
// requires field builders (parented builders).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getQueryIdentifierFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}
| |
// Resets both fields to their defaults and clears their presence bits.
public Builder clear() {
super.clear();
if (queryIdentifierBuilder_ == null) {
queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
} else {
queryIdentifierBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
fragmentIdentifierString_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}

public Builder clone() {
return create().mergeFrom(buildPartial());
}
| |
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentRequestProto_descriptor;
}

public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto getDefaultInstanceForType() {
return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance();
}

// Builds and verifies initialization (trivially true — all fields optional).
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto build() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
| |
// Copies builder state into a new message: presence bits are translated from
// the builder's bitField0_ into the message's, and the nested message is taken
// either from the plain field or from its field builder.
public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto buildPartial() {
org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (queryIdentifierBuilder_ == null) {
result.queryIdentifier_ = queryIdentifier_;
} else {
result.queryIdentifier_ = queryIdentifierBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.fragmentIdentifierString_ = fragmentIdentifierString_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
| |
// Type-dispatching merge: uses the typed overload when possible, otherwise
// falls back to the reflective GeneratedMessage merge.
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) {
return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto)other);
} else {
super.mergeFrom(other);
return this;
}
}

// Field-wise merge: only fields present in `other` overwrite/merge into this
// builder; the String field is copied in its raw String-or-ByteString form.
public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto other) {
if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance()) return this;
if (other.hasQueryIdentifier()) {
mergeQueryIdentifier(other.getQueryIdentifier());
}
if (other.hasFragmentIdentifierString()) {
bitField0_ |= 0x00000002;
fragmentIdentifierString_ = other.fragmentIdentifierString_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
      // Presence bits for this builder: 0x1 = query_identifier,
      // 0x2 = fragment_identifier_string.
      private int bitField0_;

      // optional .QueryIdentifierProto query_identifier = 1;
      // queryIdentifier_ holds the value only until a sub-builder is created
      // via getQueryIdentifierFieldBuilder(); after that the sub-builder owns
      // the value and queryIdentifier_ is nulled out.
      private org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> queryIdentifierBuilder_;
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       */
      public boolean hasQueryIdentifier() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto getQueryIdentifier() {
        if (queryIdentifierBuilder_ == null) {
          return queryIdentifier_;
        } else {
          return queryIdentifierBuilder_.getMessage();
        }
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       *
       * Replaces the current value; rejects null.
       */
      public Builder setQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
        if (queryIdentifierBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          queryIdentifier_ = value;
          onChanged();
        } else {
          queryIdentifierBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       *
       * Convenience overload: builds the sub-builder's current state and
       * stores the result.
       */
      public Builder setQueryIdentifier(
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder builderForValue) {
        if (queryIdentifierBuilder_ == null) {
          queryIdentifier_ = builderForValue.build();
          onChanged();
        } else {
          queryIdentifierBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       *
       * If a non-default value is already present, merges {@code value} into
       * it field-by-field; otherwise simply replaces it.
       */
      public Builder mergeQueryIdentifier(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto value) {
        if (queryIdentifierBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              queryIdentifier_ != org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance()) {
            queryIdentifier_ =
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.newBuilder(queryIdentifier_).mergeFrom(value).buildPartial();
          } else {
            queryIdentifier_ = value;
          }
          onChanged();
        } else {
          queryIdentifierBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       *
       * Resets the field to its default instance and clears the presence bit.
       */
      public Builder clearQueryIdentifier() {
        if (queryIdentifierBuilder_ == null) {
          queryIdentifier_ = org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.getDefaultInstance();
          onChanged();
        } else {
          queryIdentifierBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       *
       * Marks the field present and returns a mutable sub-builder for it
       * (forces creation of the field builder).
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder getQueryIdentifierBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getQueryIdentifierFieldBuilder().getBuilder();
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       */
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder getQueryIdentifierOrBuilder() {
        if (queryIdentifierBuilder_ != null) {
          return queryIdentifierBuilder_.getMessageOrBuilder();
        } else {
          return queryIdentifier_;
        }
      }
      /**
       * <code>optional .QueryIdentifierProto query_identifier = 1;</code>
       *
       * Lazily creates the field builder; ownership of the current value is
       * transferred into it, so queryIdentifier_ is nulled afterwards.
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder> 
          getQueryIdentifierFieldBuilder() {
        if (queryIdentifierBuilder_ == null) {
          queryIdentifierBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProto.Builder, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryIdentifierProtoOrBuilder>(
                  queryIdentifier_,
                  getParentForChildren(),
                  isClean());
          queryIdentifier_ = null;
        }
        return queryIdentifierBuilder_;
      }
| |
      // optional string fragment_identifier_string = 2;
      // Holds either a java.lang.String or a com.google.protobuf.ByteString;
      // converted lazily by the accessors below.
      private java.lang.Object fragmentIdentifierString_ = "";
      /**
       * <code>optional string fragment_identifier_string = 2;</code>
       */
      public boolean hasFragmentIdentifierString() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string fragment_identifier_string = 2;</code>
       *
       * Decodes a cached ByteString to UTF-8 on first access and caches the
       * decoded String (builder variant caches unconditionally).
       */
      public java.lang.String getFragmentIdentifierString() {
        java.lang.Object ref = fragmentIdentifierString_;
        if (!(ref instanceof java.lang.String)) {
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          fragmentIdentifierString_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string fragment_identifier_string = 2;</code>
       *
       * Inverse conversion: encodes a cached String to a ByteString and
       * caches the bytes.
       */
      public com.google.protobuf.ByteString
          getFragmentIdentifierStringBytes() {
        java.lang.Object ref = fragmentIdentifierString_;
        if (ref instanceof String) {
          com.google.protobuf.ByteString b = 
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          fragmentIdentifierString_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string fragment_identifier_string = 2;</code>
       *
       * Sets the field; rejects null.
       */
      public Builder setFragmentIdentifierString(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        fragmentIdentifierString_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string fragment_identifier_string = 2;</code>
       *
       * Clears the presence bit and resets the value to the message default ("").
       */
      public Builder clearFragmentIdentifierString() {
        bitField0_ = (bitField0_ & ~0x00000002);
        fragmentIdentifierString_ = getDefaultInstance().getFragmentIdentifierString();
        onChanged();
        return this;
      }
      /**
       * <code>optional string fragment_identifier_string = 2;</code>
       *
       * Raw-bytes setter used by parsing code paths; rejects null. The bytes
       * are stored as-is and decoded lazily by getFragmentIdentifierString().
       */
      public Builder setFragmentIdentifierStringBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        fragmentIdentifierString_ = value;
        onChanged();
        return this;
      }
| |
| // @@protoc_insertion_point(builder_scope:TerminateFragmentRequestProto) |
| } |
| |
| static { |
| defaultInstance = new TerminateFragmentRequestProto(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:TerminateFragmentRequestProto) |
| } |
| |
  // Marker interface for TerminateFragmentResponseProto: the message declares
  // no fields, so there are no field accessors beyond MessageOrBuilder.
  public interface TerminateFragmentResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
| /** |
| * Protobuf type {@code TerminateFragmentResponseProto} |
| */ |
| public static final class TerminateFragmentResponseProto extends |
| com.google.protobuf.GeneratedMessage |
| implements TerminateFragmentResponseProtoOrBuilder { |
| // Use TerminateFragmentResponseProto.newBuilder() to construct. |
| private TerminateFragmentResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private TerminateFragmentResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final TerminateFragmentResponseProto defaultInstance; |
| public static TerminateFragmentResponseProto getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public TerminateFragmentResponseProto getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private TerminateFragmentResponseProto( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<TerminateFragmentResponseProto> PARSER = |
| new com.google.protobuf.AbstractParser<TerminateFragmentResponseProto>() { |
| public TerminateFragmentResponseProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new TerminateFragmentResponseProto(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<TerminateFragmentResponseProto> getParserForType() { |
| return PARSER; |
| } |
| |
| private void initFields() { |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) obj; |
| |
| boolean result = true; |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
| public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto prototype) { |
| return newBuilder().mergeFrom(prototype); |
| } |
| public Builder toBuilder() { return newBuilder(this); } |
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code TerminateFragmentResponseProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_TerminateFragmentResponseProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto(this); |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance()) return this; |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:TerminateFragmentResponseProto) |
| } |
| |
| static { |
| defaultInstance = new TerminateFragmentResponseProto(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:TerminateFragmentResponseProto) |
| } |
| |
  // Read-only view shared by GetTokenRequestProto and its Builder.
  public interface GetTokenRequestProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional string app_id = 1;
    /**
     * <code>optional string app_id = 1;</code>
     */
    boolean hasAppId();
    /**
     * <code>optional string app_id = 1;</code>
     */
    java.lang.String getAppId();
    /**
     * <code>optional string app_id = 1;</code>
     */
    com.google.protobuf.ByteString
        getAppIdBytes();
  }
| /** |
| * Protobuf type {@code GetTokenRequestProto} |
| */ |
| public static final class GetTokenRequestProto extends |
| com.google.protobuf.GeneratedMessage |
| implements GetTokenRequestProtoOrBuilder { |
    // Use GetTokenRequestProto.newBuilder() to construct.
    private GetTokenRequestProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor for the singleton default instance; unknown fields start empty.
    private GetTokenRequestProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Assigned in this class's static initializer (below this view) —
    // presumably `new GetTokenRequestProto(true)` as for the sibling
    // messages; confirm there.
    private static final GetTokenRequestProto defaultInstance;
    public static GetTokenRequestProto getDefaultInstance() {
      return defaultInstance;
    }

    public GetTokenRequestProto getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
| private GetTokenRequestProto( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 10: { |
| bitField0_ |= 0x00000001; |
| appId_ = input.readBytes(); |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.Builder.class);
    }

    // Mutable public static field is intentional in protobuf 2.x generated
    // code; do not narrow its visibility.
    public static com.google.protobuf.Parser<GetTokenRequestProto> PARSER =
        new com.google.protobuf.AbstractParser<GetTokenRequestProto>() {
      public GetTokenRequestProto parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetTokenRequestProto(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetTokenRequestProto> getParserForType() {
      return PARSER;
    }
| |
    // Presence bits: 0x1 = app_id.
    private int bitField0_;
    // optional string app_id = 1;
    public static final int APP_ID_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a com.google.protobuf.ByteString;
    // converted lazily by the accessors below.
    private java.lang.Object appId_;
    /**
     * <code>optional string app_id = 1;</code>
     */
    public boolean hasAppId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional string app_id = 1;</code>
     *
     * Lazily decodes the stored ByteString as UTF-8. The decoded String is
     * cached only when the bytes are valid UTF-8, so invalid bytes keep
     * round-tripping unchanged through getAppIdBytes().
     */
    public java.lang.String getAppId() {
      java.lang.Object ref = appId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          appId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string app_id = 1;</code>
     *
     * Inverse conversion: encodes a cached String to a ByteString and caches
     * the bytes.
     */
    public com.google.protobuf.ByteString
        getAppIdBytes() {
      java.lang.Object ref = appId_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        appId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
| |
    // Sets field defaults before parsing.
    private void initFields() {
      appId_ = "";
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. All fields are
    // optional, so the message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Writes app_id (when present) followed by any preserved unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getAppIdBytes());
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getAppIdBytes());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Value equality over app_id presence/value plus unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto) obj;

      boolean result = true;
      result = result && (hasAppId() == other.hasAppId());
      if (hasAppId()) {
        result = result && getAppId()
            .equals(other.getAppId());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Hash is memoized; consistent with equals() above (descriptor, set
    // fields, unknown fields).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasAppId()) {
        hash = (37 * hash) + APP_ID_FIELD_NUMBER;
        hash = (53 * hash) + getAppId().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
| |
    // Builder factory methods. newBuilder(prototype) pre-populates a fresh
    // builder by merging the given message's set fields into it.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Internal hook used by GeneratedMessage so nested builders can notify
    // their parent of changes.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code GetTokenRequestProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| appId_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenRequestProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.appId_ = appId_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.getDefaultInstance()) return this; |
| if (other.hasAppId()) { |
| bitField0_ |= 0x00000001; |
| appId_ = other.appId_; |
| onChanged(); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional string app_id = 1; |
| private java.lang.Object appId_ = ""; |
| /** |
| * <code>optional string app_id = 1;</code> |
| */ |
| public boolean hasAppId() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional string app_id = 1;</code> |
| */ |
| public java.lang.String getAppId() { |
| java.lang.Object ref = appId_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| appId_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>optional string app_id = 1;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getAppIdBytes() { |
| java.lang.Object ref = appId_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| appId_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>optional string app_id = 1;</code> |
| */ |
| public Builder setAppId( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| appId_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string app_id = 1;</code> |
| */ |
| public Builder clearAppId() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| appId_ = getDefaultInstance().getAppId(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional string app_id = 1;</code> |
| */ |
| public Builder setAppIdBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| appId_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:GetTokenRequestProto) |
| } |
| |
    // Eagerly creates the singleton default instance with all fields at
    // their proto defaults.
    static {
      defaultInstance = new GetTokenRequestProto(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:GetTokenRequestProto) |
| } |
| |
  // Read-only accessor interface shared by GetTokenResponseProto and its
  // Builder, exposing presence checks and getters for each field.
  public interface GetTokenResponseProtoOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes token = 1;
    /**
     * <code>optional bytes token = 1;</code>
     */
    boolean hasToken();
    /**
     * <code>optional bytes token = 1;</code>
     */
    com.google.protobuf.ByteString getToken();
  }
| /** |
| * Protobuf type {@code GetTokenResponseProto} |
| */ |
| public static final class GetTokenResponseProto extends |
| com.google.protobuf.GeneratedMessage |
| implements GetTokenResponseProtoOrBuilder { |
| // Use GetTokenResponseProto.newBuilder() to construct. |
| private GetTokenResponseProto(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private GetTokenResponseProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final GetTokenResponseProto defaultInstance; |
| public static GetTokenResponseProto getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public GetTokenResponseProto getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private GetTokenResponseProto( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 10: { |
| bitField0_ |= 0x00000001; |
| token_ = input.readBytes(); |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<GetTokenResponseProto> PARSER = |
| new com.google.protobuf.AbstractParser<GetTokenResponseProto>() { |
| public GetTokenResponseProto parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new GetTokenResponseProto(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<GetTokenResponseProto> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // optional bytes token = 1; |
| public static final int TOKEN_FIELD_NUMBER = 1; |
| private com.google.protobuf.ByteString token_; |
| /** |
| * <code>optional bytes token = 1;</code> |
| */ |
| public boolean hasToken() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional bytes token = 1;</code> |
| */ |
| public com.google.protobuf.ByteString getToken() { |
| return token_; |
| } |
| |
| private void initFields() { |
| token_ = com.google.protobuf.ByteString.EMPTY; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeBytes(1, token_); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeBytesSize(1, token_); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) obj; |
| |
| boolean result = true; |
| result = result && (hasToken() == other.hasToken()); |
| if (hasToken()) { |
| result = result && getToken() |
| .equals(other.getToken()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasToken()) { |
| hash = (37 * hash) + TOKEN_FIELD_NUMBER; |
| hash = (53 * hash) + getToken().hashCode(); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseDelimitedFrom(java.io.InputStream input) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseDelimitedFrom(input, extensionRegistry); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input); |
| } |
| public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return PARSER.parseFrom(input, extensionRegistry); |
| } |
| |
| public static Builder newBuilder() { return Builder.create(); } |
| public Builder newBuilderForType() { return newBuilder(); } |
| public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto prototype) { |
| return newBuilder().mergeFrom(prototype); |
| } |
| public Builder toBuilder() { return newBuilder(this); } |
| |
| @java.lang.Override |
| protected Builder newBuilderForType( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| Builder builder = new Builder(parent); |
| return builder; |
| } |
| /** |
| * Protobuf type {@code GetTokenResponseProto} |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProtoOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| token_ = com.google.protobuf.ByteString.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_GetTokenResponseProto_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.token_ = token_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance()) return this; |
| if (other.hasToken()) { |
| setToken(other.getToken()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // optional bytes token = 1; |
| private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY; |
| /** |
| * <code>optional bytes token = 1;</code> |
| */ |
| public boolean hasToken() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>optional bytes token = 1;</code> |
| */ |
| public com.google.protobuf.ByteString getToken() { |
| return token_; |
| } |
| /** |
| * <code>optional bytes token = 1;</code> |
| */ |
| public Builder setToken(com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| token_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional bytes token = 1;</code> |
| */ |
| public Builder clearToken() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| token_ = getDefaultInstance().getToken(); |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:GetTokenResponseProto) |
| } |
| |
| static { |
| defaultInstance = new GetTokenResponseProto(true); |
| defaultInstance.initFields(); |
| } |
| |
| // @@protoc_insertion_point(class_scope:GetTokenResponseProto) |
| } |
| |
  // Read-only accessor interface shared by LlapOutputSocketInitMessage and
  // its Builder. Note fragment_id is *required* at the protobuf level.
  public interface LlapOutputSocketInitMessageOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string fragment_id = 1;
    /**
     * <code>required string fragment_id = 1;</code>
     */
    boolean hasFragmentId();
    /**
     * <code>required string fragment_id = 1;</code>
     */
    java.lang.String getFragmentId();
    /**
     * <code>required string fragment_id = 1;</code>
     */
    com.google.protobuf.ByteString
        getFragmentIdBytes();

    // optional bytes token = 2;
    /**
     * <code>optional bytes token = 2;</code>
     */
    boolean hasToken();
    /**
     * <code>optional bytes token = 2;</code>
     */
    com.google.protobuf.ByteString getToken();
  }
| /** |
| * Protobuf type {@code LlapOutputSocketInitMessage} |
| * |
| * <pre> |
| * The message sent by external client to claim the output from the output socket. |
| * </pre> |
| */ |
| public static final class LlapOutputSocketInitMessage extends |
| com.google.protobuf.GeneratedMessage |
| implements LlapOutputSocketInitMessageOrBuilder { |
    // Use LlapOutputSocketInitMessage.newBuilder() to construct.
    private LlapOutputSocketInitMessage(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance below.
    private LlapOutputSocketInitMessage(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final LlapOutputSocketInitMessage defaultInstance;
    public static LlapOutputSocketInitMessage getDefaultInstance() {
      return defaultInstance;
    }

    public LlapOutputSocketInitMessage getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not declared in the schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Stream-parsing constructor used by PARSER. Reads tag/value pairs until
    // EOF (tag 0); tag 10 = field 1 (fragment_id, stored as raw ByteString,
    // decoded lazily by getFragmentId()); tag 18 = field 2 (token). Anything
    // else is preserved in unknownFields. The finally block attaches unknown
    // fields even when parsing fails partway.
    private LlapOutputSocketInitMessage(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              fragmentId_ = input.readBytes();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              token_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.Builder.class);
    }

    // Parser singleton; delegates to the stream-parsing constructor above.
    public static com.google.protobuf.Parser<LlapOutputSocketInitMessage> PARSER =
        new com.google.protobuf.AbstractParser<LlapOutputSocketInitMessage>() {
      public LlapOutputSocketInitMessage parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new LlapOutputSocketInitMessage(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<LlapOutputSocketInitMessage> getParserForType() {
      return PARSER;
    }
| |
    // Presence bits: bit 0 = fragment_id, bit 1 = token.
    private int bitField0_;
    // required string fragment_id = 1;
    public static final int FRAGMENT_ID_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString (lazily decoded).
    private java.lang.Object fragmentId_;
    /**
     * <code>required string fragment_id = 1;</code>
     */
    public boolean hasFragmentId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string fragment_id = 1;</code>
     *
     * <p>Decodes the stored ByteString as UTF-8 on first use; the decoded
     * String is cached only when the bytes are valid UTF-8.
     */
    public java.lang.String getFragmentId() {
      java.lang.Object ref = fragmentId_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs = 
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        if (bs.isValidUtf8()) {
          fragmentId_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string fragment_id = 1;</code>
     *
     * <p>Returns the UTF-8 bytes, caching the ByteString form if the field is
     * currently stored as a String.
     */
    public com.google.protobuf.ByteString
        getFragmentIdBytes() {
      java.lang.Object ref = fragmentId_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b = 
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        fragmentId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    // optional bytes token = 2;
    public static final int TOKEN_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString token_;
    /**
     * <code>optional bytes token = 2;</code>
     */
    public boolean hasToken() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes token = 2;</code>
     */
    public com.google.protobuf.ByteString getToken() {
      return token_;
    }
| |
    // Resets fields to their proto defaults (called before parsing).
    private void initFields() {
      fragmentId_ = "";
      token_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // False unless the required fragment_id field has been set.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasFragmentId()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
| |
    // Serializes set fields in field-number order, then any unknown fields.
    // getSerializedSize() is called first to populate the memoized size.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getFragmentIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, token_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized wire size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getFragmentIdBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, token_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
| |
    private static final long serialVersionUID = 0L;
    // Delegates Java serialization to GeneratedMessage's serialized proxy
    // so the message is serialized via its protobuf wire form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
| |
    // Two messages are equal iff the same fields are present, every present
    // field compares equal, and the unknown field sets match.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage other = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) obj;

      boolean result = true;
      result = result && (hasFragmentId() == other.hasFragmentId());
      if (hasFragmentId()) {
        result = result && getFragmentId()
            .equals(other.getFragmentId());
      }
      result = result && (hasToken() == other.hasToken());
      if (hasToken()) {
        result = result && getToken()
            .equals(other.getToken());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    // Hash mixes the descriptor, each present field (tagged by its field
    // number), and the unknown fields — consistent with equals() above.
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFragmentId()) {
        hash = (37 * hash) + FRAGMENT_ID_FIELD_NUMBER;
        hash = (53 * hash) + getFragmentId().hashCode();
      }
      if (hasToken()) {
        hash = (37 * hash) + TOKEN_FIELD_NUMBER;
        hash = (53 * hash) + getToken().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
| |
    // ---- Static parse entry points: all delegate to PARSER and throw
    // ---- InvalidProtocolBufferException (or IOException for streams) on
    // ---- malformed input.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a leading varint length before the message,
    // allowing several messages on one stream.
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // ---- Builder factories ----
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    // Returns a fresh builder pre-populated with |prototype|'s fields.
    public static Builder newBuilder(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
| /** |
| * Protobuf type {@code LlapOutputSocketInitMessage} |
| * |
| * <pre> |
| * The message sent by external client to claim the output from the output socket. |
| * </pre> |
| */ |
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessageOrBuilder { |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.class, org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.Builder.class); |
| } |
| |
| // Construct using org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.newBuilder() |
| private Builder() { |
| maybeForceBuilderInitialization(); |
| } |
| |
| private Builder( |
| com.google.protobuf.GeneratedMessage.BuilderParent parent) { |
| super(parent); |
| maybeForceBuilderInitialization(); |
| } |
| private void maybeForceBuilderInitialization() { |
| if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { |
| } |
| } |
| private static Builder create() { |
| return new Builder(); |
| } |
| |
| public Builder clear() { |
| super.clear(); |
| fragmentId_ = ""; |
| bitField0_ = (bitField0_ & ~0x00000001); |
| token_ = com.google.protobuf.ByteString.EMPTY; |
| bitField0_ = (bitField0_ & ~0x00000002); |
| return this; |
| } |
| |
| public Builder clone() { |
| return create().mergeFrom(buildPartial()); |
| } |
| |
| public com.google.protobuf.Descriptors.Descriptor |
| getDescriptorForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.internal_static_LlapOutputSocketInitMessage_descriptor; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage getDefaultInstanceForType() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.getDefaultInstance(); |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage build() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage result = buildPartial(); |
| if (!result.isInitialized()) { |
| throw newUninitializedMessageException(result); |
| } |
| return result; |
| } |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage buildPartial() { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage result = new org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage(this); |
| int from_bitField0_ = bitField0_; |
| int to_bitField0_ = 0; |
| if (((from_bitField0_ & 0x00000001) == 0x00000001)) { |
| to_bitField0_ |= 0x00000001; |
| } |
| result.fragmentId_ = fragmentId_; |
| if (((from_bitField0_ & 0x00000002) == 0x00000002)) { |
| to_bitField0_ |= 0x00000002; |
| } |
| result.token_ = token_; |
| result.bitField0_ = to_bitField0_; |
| onBuilt(); |
| return result; |
| } |
| |
| public Builder mergeFrom(com.google.protobuf.Message other) { |
| if (other instanceof org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) { |
| return mergeFrom((org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage)other); |
| } else { |
| super.mergeFrom(other); |
| return this; |
| } |
| } |
| |
| public Builder mergeFrom(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage other) { |
| if (other == org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage.getDefaultInstance()) return this; |
| if (other.hasFragmentId()) { |
| bitField0_ |= 0x00000001; |
| fragmentId_ = other.fragmentId_; |
| onChanged(); |
| } |
| if (other.hasToken()) { |
| setToken(other.getToken()); |
| } |
| this.mergeUnknownFields(other.getUnknownFields()); |
| return this; |
| } |
| |
| public final boolean isInitialized() { |
| if (!hasFragmentId()) { |
| |
| return false; |
| } |
| return true; |
| } |
| |
| public Builder mergeFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage parsedMessage = null; |
| try { |
| parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| parsedMessage = (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage) e.getUnfinishedMessage(); |
| throw e; |
| } finally { |
| if (parsedMessage != null) { |
| mergeFrom(parsedMessage); |
| } |
| } |
| return this; |
| } |
| private int bitField0_; |
| |
| // required string fragment_id = 1; |
| private java.lang.Object fragmentId_ = ""; |
| /** |
| * <code>required string fragment_id = 1;</code> |
| */ |
| public boolean hasFragmentId() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>required string fragment_id = 1;</code> |
| */ |
| public java.lang.String getFragmentId() { |
| java.lang.Object ref = fragmentId_; |
| if (!(ref instanceof java.lang.String)) { |
| java.lang.String s = ((com.google.protobuf.ByteString) ref) |
| .toStringUtf8(); |
| fragmentId_ = s; |
| return s; |
| } else { |
| return (java.lang.String) ref; |
| } |
| } |
| /** |
| * <code>required string fragment_id = 1;</code> |
| */ |
| public com.google.protobuf.ByteString |
| getFragmentIdBytes() { |
| java.lang.Object ref = fragmentId_; |
| if (ref instanceof String) { |
| com.google.protobuf.ByteString b = |
| com.google.protobuf.ByteString.copyFromUtf8( |
| (java.lang.String) ref); |
| fragmentId_ = b; |
| return b; |
| } else { |
| return (com.google.protobuf.ByteString) ref; |
| } |
| } |
| /** |
| * <code>required string fragment_id = 1;</code> |
| */ |
| public Builder setFragmentId( |
| java.lang.String value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| fragmentId_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>required string fragment_id = 1;</code> |
| */ |
| public Builder clearFragmentId() { |
| bitField0_ = (bitField0_ & ~0x00000001); |
| fragmentId_ = getDefaultInstance().getFragmentId(); |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>required string fragment_id = 1;</code> |
| */ |
| public Builder setFragmentIdBytes( |
| com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000001; |
| fragmentId_ = value; |
| onChanged(); |
| return this; |
| } |
| |
| // optional bytes token = 2; |
| private com.google.protobuf.ByteString token_ = com.google.protobuf.ByteString.EMPTY; |
| /** |
| * <code>optional bytes token = 2;</code> |
| */ |
| public boolean hasToken() { |
| return ((bitField0_ & 0x00000002) == 0x00000002); |
| } |
| /** |
| * <code>optional bytes token = 2;</code> |
| */ |
| public com.google.protobuf.ByteString getToken() { |
| return token_; |
| } |
| /** |
| * <code>optional bytes token = 2;</code> |
| */ |
| public Builder setToken(com.google.protobuf.ByteString value) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| bitField0_ |= 0x00000002; |
| token_ = value; |
| onChanged(); |
| return this; |
| } |
| /** |
| * <code>optional bytes token = 2;</code> |
| */ |
| public Builder clearToken() { |
| bitField0_ = (bitField0_ & ~0x00000002); |
| token_ = getDefaultInstance().getToken(); |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:LlapOutputSocketInitMessage) |
| } |
| |
    // Eagerly create the shared default (empty) instance used by
    // getDefaultInstance() and as the merge no-op sentinel.
    static {
      defaultInstance = new LlapOutputSocketInitMessage(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:LlapOutputSocketInitMessage) |
| } |
| |
| /** |
| * Protobuf service {@code LlapDaemonProtocol} |
| */ |
| public static abstract class LlapDaemonProtocol |
| implements com.google.protobuf.Service { |
| protected LlapDaemonProtocol() {} |
| |
    // Non-abstract-class variant of the service: implement this interface
    // and wrap it with newReflectiveService(...) to obtain a Service.
    public interface Interface {
      /**
       * <code>rpc submitWork(.SubmitWorkRequestProto) returns (.SubmitWorkResponseProto);</code>
       */
      public abstract void submitWork(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto> done);

      /**
       * <code>rpc sourceStateUpdated(.SourceStateUpdatedRequestProto) returns (.SourceStateUpdatedResponseProto);</code>
       */
      public abstract void sourceStateUpdated(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto> done);

      /**
       * <code>rpc queryComplete(.QueryCompleteRequestProto) returns (.QueryCompleteResponseProto);</code>
       */
      public abstract void queryComplete(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto> done);

      /**
       * <code>rpc terminateFragment(.TerminateFragmentRequestProto) returns (.TerminateFragmentResponseProto);</code>
       */
      public abstract void terminateFragment(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto> done);

    }
| |
    // Adapts an Interface implementation into a full Service: the returned
    // anonymous LlapDaemonProtocol simply forwards each RPC to |impl|.
    public static com.google.protobuf.Service newReflectiveService(
        final Interface impl) {
      return new LlapDaemonProtocol() {
        @java.lang.Override
        public  void submitWork(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto> done) {
          impl.submitWork(controller, request, done);
        }

        @java.lang.Override
        public  void sourceStateUpdated(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto> done) {
          impl.sourceStateUpdated(controller, request, done);
        }

        @java.lang.Override
        public  void queryComplete(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto> done) {
          impl.queryComplete(controller, request, done);
        }

        @java.lang.Override
        public  void terminateFragment(
            com.google.protobuf.RpcController controller,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto request,
            com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto> done) {
          impl.terminateFragment(controller, request, done);
        }

      };
    }
| |
    // Adapts a BlockingInterface implementation into a BlockingService.
    // RPCs are dispatched reflectively by the method's descriptor index
    // (0=submitWork, 1=sourceStateUpdated, 2=queryComplete,
    // 3=terminateFragment) — the switches below must stay in sync with the
    // method order in the .proto service definition.
    public static com.google.protobuf.BlockingService
        newReflectiveBlockingService(final BlockingInterface impl) {
      return new com.google.protobuf.BlockingService() {
        public final com.google.protobuf.Descriptors.ServiceDescriptor
            getDescriptorForType() {
          return getDescriptor();
        }

        // Casts |request| to the concrete request type and invokes the
        // matching blocking method on |impl|.
        public final com.google.protobuf.Message callBlockingMethod(
            com.google.protobuf.Descriptors.MethodDescriptor method,
            com.google.protobuf.RpcController controller,
            com.google.protobuf.Message request)
            throws com.google.protobuf.ServiceException {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.callBlockingMethod() given method descriptor for " +
              "wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return impl.submitWork(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto)request);
            case 1:
              return impl.sourceStateUpdated(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto)request);
            case 2:
              return impl.queryComplete(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)request);
            case 3:
              return impl.terminateFragment(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto)request);
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Default instance of the request message for a given method; used
        // by RPC frameworks to parse incoming requests.
        public final com.google.protobuf.Message
            getRequestPrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getRequestPrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

        // Default instance of the response message for a given method.
        public final com.google.protobuf.Message
            getResponsePrototype(
            com.google.protobuf.Descriptors.MethodDescriptor method) {
          if (method.getService() != getDescriptor()) {
            throw new java.lang.IllegalArgumentException(
              "Service.getResponsePrototype() given method " +
              "descriptor for wrong service type.");
          }
          switch(method.getIndex()) {
            case 0:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance();
            case 1:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance();
            case 2:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance();
            case 3:
              return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance();
            default:
              throw new java.lang.AssertionError("Can't get here.");
          }
        }

      };
    }
| |
| /** |
| * <code>rpc submitWork(.SubmitWorkRequestProto) returns (.SubmitWorkResponseProto);</code> |
| */ |
| public abstract void submitWork( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto> done); |
| |
| /** |
| * <code>rpc sourceStateUpdated(.SourceStateUpdatedRequestProto) returns (.SourceStateUpdatedResponseProto);</code> |
| */ |
| public abstract void sourceStateUpdated( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto> done); |
| |
| /** |
| * <code>rpc queryComplete(.QueryCompleteRequestProto) returns (.QueryCompleteResponseProto);</code> |
| */ |
| public abstract void queryComplete( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto> done); |
| |
| /** |
| * <code>rpc terminateFragment(.TerminateFragmentRequestProto) returns (.TerminateFragmentResponseProto);</code> |
| */ |
| public abstract void terminateFragment( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto> done); |
| |
| public static final |
| com.google.protobuf.Descriptors.ServiceDescriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.getDescriptor().getServices().get(0); |
| } |
| public final com.google.protobuf.Descriptors.ServiceDescriptor |
| getDescriptorForType() { |
| return getDescriptor(); |
| } |
| |
    // Dispatches an async RPC to the matching abstract method by the
    // method's descriptor index; indices must match the .proto declaration
    // order (0=submitWork ... 3=terminateFragment).
    public final void callMethod(
        com.google.protobuf.Descriptors.MethodDescriptor method,
        com.google.protobuf.RpcController controller,
        com.google.protobuf.Message request,
        com.google.protobuf.RpcCallback<
          com.google.protobuf.Message> done) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.callMethod() given method descriptor for wrong " +
          "service type.");
      }
      switch(method.getIndex()) {
        case 0:
          this.submitWork(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto>specializeCallback(
              done));
          return;
        case 1:
          this.sourceStateUpdated(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto>specializeCallback(
              done));
          return;
        case 2:
          this.queryComplete(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto>specializeCallback(
              done));
          return;
        case 3:
          this.terminateFragment(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto)request,
            com.google.protobuf.RpcUtil.<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto>specializeCallback(
              done));
          return;
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    // Default request message instance per method (used to parse requests).
    public final com.google.protobuf.Message
        getRequestPrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getRequestPrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }

    // Default response message instance per method (used to parse responses).
    public final com.google.protobuf.Message
        getResponsePrototype(
        com.google.protobuf.Descriptors.MethodDescriptor method) {
      if (method.getService() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "Service.getResponsePrototype() given method " +
          "descriptor for wrong service type.");
      }
      switch(method.getIndex()) {
        case 0:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance();
        case 1:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance();
        case 2:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance();
        case 3:
          return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance();
        default:
          throw new java.lang.AssertionError("Can't get here.");
      }
    }
| |
    // Creates an async client stub that issues calls over |channel|.
    public static Stub newStub(
        com.google.protobuf.RpcChannel channel) {
      return new Stub(channel);
    }

    // Async client stub: each method forwards to channel.callMethod with the
    // corresponding method descriptor (index matches .proto order) and a
    // callback specialized to the concrete response type.
    public static final class Stub extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapDaemonProtocol implements Interface {
      private Stub(com.google.protobuf.RpcChannel channel) {
        this.channel = channel;
      }

      private final com.google.protobuf.RpcChannel channel;

      public com.google.protobuf.RpcChannel getChannel() {
        return channel;
      }

      public  void submitWork(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(0),
          controller,
          request,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.class,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance()));
      }

      public  void sourceStateUpdated(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(1),
          controller,
          request,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.class,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance()));
      }

      public  void queryComplete(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(2),
          controller,
          request,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.class,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance()));
      }

      public  void terminateFragment(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto> done) {
        channel.callMethod(
          getDescriptor().getMethods().get(3),
          controller,
          request,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance(),
          com.google.protobuf.RpcUtil.generalizeCallback(
            done,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.class,
            org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance()));
      }
    }
| |
    // Creates a synchronous client stub that issues calls over |channel|.
    public static BlockingInterface newBlockingStub(
        com.google.protobuf.BlockingRpcChannel channel) {
      return new BlockingStub(channel);
    }

    // Synchronous counterpart of Interface: each call blocks and returns the
    // response directly, throwing ServiceException on RPC failure.
    public interface BlockingInterface {
      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto submitWork(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto sourceStateUpdated(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto queryComplete(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto request)
          throws com.google.protobuf.ServiceException;

      public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto terminateFragment(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto request)
          throws com.google.protobuf.ServiceException;
    }
| |
| private static final class BlockingStub implements BlockingInterface { |
| private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { |
| this.channel = channel; |
| } |
| |
| private final com.google.protobuf.BlockingRpcChannel channel; |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto submitWork( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto request) |
| throws com.google.protobuf.ServiceException { |
| return (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto) channel.callBlockingMethod( |
| getDescriptor().getMethods().get(0), |
| controller, |
| request, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkResponseProto.getDefaultInstance()); |
| } |
| |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto sourceStateUpdated( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedRequestProto request) |
| throws com.google.protobuf.ServiceException { |
| return (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto) channel.callBlockingMethod( |
| getDescriptor().getMethods().get(1), |
| controller, |
| request, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SourceStateUpdatedResponseProto.getDefaultInstance()); |
| } |
| |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto queryComplete( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteRequestProto request) |
| throws com.google.protobuf.ServiceException { |
| return (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto) channel.callBlockingMethod( |
| getDescriptor().getMethods().get(2), |
| controller, |
| request, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.QueryCompleteResponseProto.getDefaultInstance()); |
| } |
| |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto terminateFragment( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentRequestProto request) |
| throws com.google.protobuf.ServiceException { |
| return (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto) channel.callBlockingMethod( |
| getDescriptor().getMethods().get(3), |
| controller, |
| request, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.TerminateFragmentResponseProto.getDefaultInstance()); |
| } |
| |
| } |
| |
| // @@protoc_insertion_point(class_scope:LlapDaemonProtocol) |
| } |
| |
| /** |
| * Protobuf service {@code LlapManagementProtocol} |
| */ |
| public static abstract class LlapManagementProtocol |
| implements com.google.protobuf.Service { |
| protected LlapManagementProtocol() {} |
| |
| public interface Interface { |
| /** |
| * <code>rpc getDelegationToken(.GetTokenRequestProto) returns (.GetTokenResponseProto);</code> |
| */ |
| public abstract void getDelegationToken( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto> done); |
| |
| } |
| |
| public static com.google.protobuf.Service newReflectiveService( |
| final Interface impl) { |
| return new LlapManagementProtocol() { |
| @java.lang.Override |
| public void getDelegationToken( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto> done) { |
| impl.getDelegationToken(controller, request, done); |
| } |
| |
| }; |
| } |
| |
| public static com.google.protobuf.BlockingService |
| newReflectiveBlockingService(final BlockingInterface impl) { |
| return new com.google.protobuf.BlockingService() { |
| public final com.google.protobuf.Descriptors.ServiceDescriptor |
| getDescriptorForType() { |
| return getDescriptor(); |
| } |
| |
| public final com.google.protobuf.Message callBlockingMethod( |
| com.google.protobuf.Descriptors.MethodDescriptor method, |
| com.google.protobuf.RpcController controller, |
| com.google.protobuf.Message request) |
| throws com.google.protobuf.ServiceException { |
| if (method.getService() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "Service.callBlockingMethod() given method descriptor for " + |
| "wrong service type."); |
| } |
| switch(method.getIndex()) { |
| case 0: |
| return impl.getDelegationToken(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto)request); |
| default: |
| throw new java.lang.AssertionError("Can't get here."); |
| } |
| } |
| |
| public final com.google.protobuf.Message |
| getRequestPrototype( |
| com.google.protobuf.Descriptors.MethodDescriptor method) { |
| if (method.getService() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "Service.getRequestPrototype() given method " + |
| "descriptor for wrong service type."); |
| } |
| switch(method.getIndex()) { |
| case 0: |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.getDefaultInstance(); |
| default: |
| throw new java.lang.AssertionError("Can't get here."); |
| } |
| } |
| |
| public final com.google.protobuf.Message |
| getResponsePrototype( |
| com.google.protobuf.Descriptors.MethodDescriptor method) { |
| if (method.getService() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "Service.getResponsePrototype() given method " + |
| "descriptor for wrong service type."); |
| } |
| switch(method.getIndex()) { |
| case 0: |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance(); |
| default: |
| throw new java.lang.AssertionError("Can't get here."); |
| } |
| } |
| |
| }; |
| } |
| |
| /** |
| * <code>rpc getDelegationToken(.GetTokenRequestProto) returns (.GetTokenResponseProto);</code> |
| */ |
| public abstract void getDelegationToken( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto> done); |
| |
| public static final |
| com.google.protobuf.Descriptors.ServiceDescriptor |
| getDescriptor() { |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.getDescriptor().getServices().get(1); |
| } |
| public final com.google.protobuf.Descriptors.ServiceDescriptor |
| getDescriptorForType() { |
| return getDescriptor(); |
| } |
| |
| public final void callMethod( |
| com.google.protobuf.Descriptors.MethodDescriptor method, |
| com.google.protobuf.RpcController controller, |
| com.google.protobuf.Message request, |
| com.google.protobuf.RpcCallback< |
| com.google.protobuf.Message> done) { |
| if (method.getService() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "Service.callMethod() given method descriptor for wrong " + |
| "service type."); |
| } |
| switch(method.getIndex()) { |
| case 0: |
| this.getDelegationToken(controller, (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto)request, |
| com.google.protobuf.RpcUtil.<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto>specializeCallback( |
| done)); |
| return; |
| default: |
| throw new java.lang.AssertionError("Can't get here."); |
| } |
| } |
| |
| public final com.google.protobuf.Message |
| getRequestPrototype( |
| com.google.protobuf.Descriptors.MethodDescriptor method) { |
| if (method.getService() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "Service.getRequestPrototype() given method " + |
| "descriptor for wrong service type."); |
| } |
| switch(method.getIndex()) { |
| case 0: |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto.getDefaultInstance(); |
| default: |
| throw new java.lang.AssertionError("Can't get here."); |
| } |
| } |
| |
| public final com.google.protobuf.Message |
| getResponsePrototype( |
| com.google.protobuf.Descriptors.MethodDescriptor method) { |
| if (method.getService() != getDescriptor()) { |
| throw new java.lang.IllegalArgumentException( |
| "Service.getResponsePrototype() given method " + |
| "descriptor for wrong service type."); |
| } |
| switch(method.getIndex()) { |
| case 0: |
| return org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance(); |
| default: |
| throw new java.lang.AssertionError("Can't get here."); |
| } |
| } |
| |
| public static Stub newStub( |
| com.google.protobuf.RpcChannel channel) { |
| return new Stub(channel); |
| } |
| |
| public static final class Stub extends org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapManagementProtocol implements Interface { |
| private Stub(com.google.protobuf.RpcChannel channel) { |
| this.channel = channel; |
| } |
| |
| private final com.google.protobuf.RpcChannel channel; |
| |
| public com.google.protobuf.RpcChannel getChannel() { |
| return channel; |
| } |
| |
| public void getDelegationToken( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto request, |
| com.google.protobuf.RpcCallback<org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto> done) { |
| channel.callMethod( |
| getDescriptor().getMethods().get(0), |
| controller, |
| request, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance(), |
| com.google.protobuf.RpcUtil.generalizeCallback( |
| done, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.class, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance())); |
| } |
| } |
| |
| public static BlockingInterface newBlockingStub( |
| com.google.protobuf.BlockingRpcChannel channel) { |
| return new BlockingStub(channel); |
| } |
| |
| public interface BlockingInterface { |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto getDelegationToken( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto request) |
| throws com.google.protobuf.ServiceException; |
| } |
| |
| private static final class BlockingStub implements BlockingInterface { |
| private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) { |
| this.channel = channel; |
| } |
| |
| private final com.google.protobuf.BlockingRpcChannel channel; |
| |
| public org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto getDelegationToken( |
| com.google.protobuf.RpcController controller, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenRequestProto request) |
| throws com.google.protobuf.ServiceException { |
| return (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto) channel.callBlockingMethod( |
| getDescriptor().getMethods().get(0), |
| controller, |
| request, |
| org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.GetTokenResponseProto.getDefaultInstance()); |
| } |
| |
| } |
| |
| // @@protoc_insertion_point(class_scope:LlapManagementProtocol) |
| } |
| |
  // Per-message descriptor and reflective field-accessor table, one pair for
  // each message type declared in LlapDaemonProtocol.proto.  All of these are
  // assigned exactly once by the static initializer at the bottom of this
  // class, after the file descriptor has been parsed.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_UserPayloadProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_UserPayloadProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_EntityDescriptorProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_EntityDescriptorProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IOSpecProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IOSpecProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GroupInputSpecProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GroupInputSpecProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SignableVertexSpec_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SignableVertexSpec_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_VertexOrBinary_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_VertexOrBinary_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_FragmentRuntimeInfo_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_FragmentRuntimeInfo_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_QueryIdentifierProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_QueryIdentifierProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_NotTezEvent_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_NotTezEvent_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SubmitWorkRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SubmitWorkRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SubmitWorkResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SubmitWorkResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SourceStateUpdatedRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SourceStateUpdatedResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_QueryCompleteRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_QueryCompleteRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_QueryCompleteResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_QueryCompleteResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TerminateFragmentRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TerminateFragmentRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TerminateFragmentResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TerminateFragmentResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetTokenRequestProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetTokenRequestProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_GetTokenResponseProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_GetTokenResponseProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_LlapOutputSocketInitMessage_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_LlapOutputSocketInitMessage_fieldAccessorTable;
| |
  /**
   * Returns the file descriptor describing every message, enum and service
   * generated from {@code src/protobuf/LlapDaemonProtocol.proto}.
   */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once, inside the static initializer's assigner callback.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for LlapDaemonProtocol.proto, chunked
    // into Java string constants by protoc.  Treat as opaque data: changing
    // any character corrupts the descriptor.
    java.lang.String[] descriptorData = {
      "\n%src/protobuf/LlapDaemonProtocol.proto\"" +
      "9\n\020UserPayloadProto\022\024\n\014user_payload\030\001 \001(" +
      "\014\022\017\n\007version\030\002 \001(\005\"j\n\025EntityDescriptorPr" +
      "oto\022\022\n\nclass_name\030\001 \001(\t\022\'\n\014user_payload\030" +
      "\002 \001(\0132\021.UserPayloadProto\022\024\n\014history_text" +
      "\030\003 \001(\014\"x\n\013IOSpecProto\022\035\n\025connected_verte" +
      "x_name\030\001 \001(\t\022-\n\rio_descriptor\030\002 \001(\0132\026.En" +
      "tityDescriptorProto\022\033\n\023physical_edge_cou" +
      "nt\030\003 \001(\005\"z\n\023GroupInputSpecProto\022\022\n\ngroup" +
      "_name\030\001 \001(\t\022\026\n\016group_vertices\030\002 \003(\t\0227\n\027m",
      "erged_input_descriptor\030\003 \001(\0132\026.EntityDes" +
      "criptorProto\"\245\003\n\022SignableVertexSpec\022\014\n\004u" +
      "ser\030\001 \001(\t\022\026\n\016signatureKeyId\030\002 \001(\003\022/\n\020que" +
      "ry_identifier\030\003 \001(\0132\025.QueryIdentifierPro" +
      "to\022\025\n\rhive_query_id\030\004 \001(\t\022\020\n\010dag_name\030\005 " +
      "\001(\t\022\023\n\013vertex_name\030\006 \001(\t\022\024\n\014vertex_index" +
      "\030\007 \001(\005\022\030\n\020token_identifier\030\010 \001(\t\0224\n\024proc" +
      "essor_descriptor\030\t \001(\0132\026.EntityDescripto" +
      "rProto\022!\n\013input_specs\030\n \003(\0132\014.IOSpecProt" +
      "o\022\"\n\014output_specs\030\013 \003(\0132\014.IOSpecProto\0221\n",
      "\023grouped_input_specs\030\014 \003(\0132\024.GroupInputS" +
      "pecProto\022\032\n\022vertex_parallelism\030\r \001(\005\"K\n\016" +
      "VertexOrBinary\022#\n\006vertex\030\001 \001(\0132\023.Signabl" +
      "eVertexSpec\022\024\n\014vertexBinary\030\002 \001(\014\"\344\001\n\023Fr" +
      "agmentRuntimeInfo\022#\n\033num_self_and_upstre" +
      "am_tasks\030\001 \001(\005\022-\n%num_self_and_upstream_" +
      "completed_tasks\030\002 \001(\005\022\033\n\023within_dag_prio" +
      "rity\030\003 \001(\005\022\026\n\016dag_start_time\030\004 \001(\003\022 \n\030fi" +
      "rst_attempt_start_time\030\005 \001(\003\022\"\n\032current_" +
      "attempt_start_time\030\006 \001(\003\"d\n\024QueryIdentif",
      "ierProto\022\035\n\025application_id_string\030\001 \001(\t\022" +
      "\021\n\tdag_index\030\002 \001(\005\022\032\n\022app_attempt_number" +
      "\030\003 \001(\005\"l\n\013NotTezEvent\022\037\n\027input_event_pro" +
      "to_bytes\030\001 \002(\014\022\023\n\013vertex_name\030\002 \002(\t\022\027\n\017d" +
      "est_input_name\030\003 \002(\t\022\016\n\006key_id\030\004 \001(\005\"\330\002\n" +
      "\026SubmitWorkRequestProto\022\"\n\twork_spec\030\001 \001" +
      "(\0132\017.VertexOrBinary\022\033\n\023work_spec_signatu" +
      "re\030\002 \001(\014\022\027\n\017fragment_number\030\003 \001(\005\022\026\n\016att" +
      "empt_number\030\004 \001(\005\022\033\n\023container_id_string" +
      "\030\005 \001(\t\022\017\n\007am_host\030\006 \001(\t\022\017\n\007am_port\030\007 \001(\005",
      "\022\032\n\022credentials_binary\030\010 \001(\014\0223\n\025fragment" +
      "_runtime_info\030\t \001(\0132\024.FragmentRuntimeInf" +
      "o\022\033\n\023initial_event_bytes\030\n \001(\014\022\037\n\027initia" +
      "l_event_signature\030\013 \001(\014\"J\n\027SubmitWorkRes" +
      "ponseProto\022/\n\020submission_state\030\001 \001(\0162\025.S" +
      "ubmissionStateProto\"\205\001\n\036SourceStateUpdat" +
      "edRequestProto\022/\n\020query_identifier\030\001 \001(\013" +
      "2\025.QueryIdentifierProto\022\020\n\010src_name\030\002 \001(" +
      "\t\022 \n\005state\030\003 \001(\0162\021.SourceStateProto\"!\n\037S" +
      "ourceStateUpdatedResponseProto\"e\n\031QueryC",
      "ompleteRequestProto\022/\n\020query_identifier\030" +
      "\001 \001(\0132\025.QueryIdentifierProto\022\027\n\014delete_d" +
      "elay\030\002 \001(\003:\0010\"\034\n\032QueryCompleteResponsePr" +
      "oto\"t\n\035TerminateFragmentRequestProto\022/\n\020" +
      "query_identifier\030\001 \001(\0132\025.QueryIdentifier" +
      "Proto\022\"\n\032fragment_identifier_string\030\002 \001(" +
      "\t\" \n\036TerminateFragmentResponseProto\"&\n\024G" +
      "etTokenRequestProto\022\016\n\006app_id\030\001 \001(\t\"&\n\025G" +
      "etTokenResponseProto\022\r\n\005token\030\001 \001(\014\"A\n\033L" +
      "lapOutputSocketInitMessage\022\023\n\013fragment_i",
      "d\030\001 \002(\t\022\r\n\005token\030\002 \001(\014*2\n\020SourceStatePro" +
      "to\022\017\n\013S_SUCCEEDED\020\001\022\r\n\tS_RUNNING\020\002*E\n\024Su" +
      "bmissionStateProto\022\014\n\010ACCEPTED\020\001\022\014\n\010REJE" +
      "CTED\020\002\022\021\n\rEVICTED_OTHER\020\0032\316\002\n\022LlapDaemon" +
      "Protocol\022?\n\nsubmitWork\022\027.SubmitWorkReque" +
      "stProto\032\030.SubmitWorkResponseProto\022W\n\022sou" +
      "rceStateUpdated\022\037.SourceStateUpdatedRequ" +
      "estProto\032 .SourceStateUpdatedResponsePro" +
      "to\022H\n\rqueryComplete\022\032.QueryCompleteReque" +
      "stProto\032\033.QueryCompleteResponseProto\022T\n\021",
      "terminateFragment\022\036.TerminateFragmentReq" +
      "uestProto\032\037.TerminateFragmentResponsePro" +
      "to2]\n\026LlapManagementProtocol\022C\n\022getDeleg" +
      "ationToken\022\025.GetTokenRequestProto\032\026.GetT" +
      "okenResponseProtoBH\n&org.apache.hadoop.h" +
      "ive.llap.daemon.rpcB\030LlapDaemonProtocolP" +
      "rotos\210\001\001\240\001\001"
    };
    // Callback invoked once the file descriptor is parsed: captures the root
    // descriptor and wires each message's descriptor and field-accessor
    // table (declared above) by message index, in declaration order.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            internal_static_UserPayloadProto_descriptor =
              getDescriptor().getMessageTypes().get(0);
            internal_static_UserPayloadProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_UserPayloadProto_descriptor,
                new java.lang.String[] { "UserPayload", "Version", });
            internal_static_EntityDescriptorProto_descriptor =
              getDescriptor().getMessageTypes().get(1);
            internal_static_EntityDescriptorProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_EntityDescriptorProto_descriptor,
                new java.lang.String[] { "ClassName", "UserPayload", "HistoryText", });
            internal_static_IOSpecProto_descriptor =
              getDescriptor().getMessageTypes().get(2);
            internal_static_IOSpecProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_IOSpecProto_descriptor,
                new java.lang.String[] { "ConnectedVertexName", "IoDescriptor", "PhysicalEdgeCount", });
            internal_static_GroupInputSpecProto_descriptor =
              getDescriptor().getMessageTypes().get(3);
            internal_static_GroupInputSpecProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_GroupInputSpecProto_descriptor,
                new java.lang.String[] { "GroupName", "GroupVertices", "MergedInputDescriptor", });
            internal_static_SignableVertexSpec_descriptor =
              getDescriptor().getMessageTypes().get(4);
            internal_static_SignableVertexSpec_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_SignableVertexSpec_descriptor,
                new java.lang.String[] { "User", "SignatureKeyId", "QueryIdentifier", "HiveQueryId", "DagName", "VertexName", "VertexIndex", "TokenIdentifier", "ProcessorDescriptor", "InputSpecs", "OutputSpecs", "GroupedInputSpecs", "VertexParallelism", });
            internal_static_VertexOrBinary_descriptor =
              getDescriptor().getMessageTypes().get(5);
            internal_static_VertexOrBinary_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_VertexOrBinary_descriptor,
                new java.lang.String[] { "Vertex", "VertexBinary", });
            internal_static_FragmentRuntimeInfo_descriptor =
              getDescriptor().getMessageTypes().get(6);
            internal_static_FragmentRuntimeInfo_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_FragmentRuntimeInfo_descriptor,
                new java.lang.String[] { "NumSelfAndUpstreamTasks", "NumSelfAndUpstreamCompletedTasks", "WithinDagPriority", "DagStartTime", "FirstAttemptStartTime", "CurrentAttemptStartTime", });
            internal_static_QueryIdentifierProto_descriptor =
              getDescriptor().getMessageTypes().get(7);
            internal_static_QueryIdentifierProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_QueryIdentifierProto_descriptor,
                new java.lang.String[] { "ApplicationIdString", "DagIndex", "AppAttemptNumber", });
            internal_static_NotTezEvent_descriptor =
              getDescriptor().getMessageTypes().get(8);
            internal_static_NotTezEvent_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_NotTezEvent_descriptor,
                new java.lang.String[] { "InputEventProtoBytes", "VertexName", "DestInputName", "KeyId", });
            internal_static_SubmitWorkRequestProto_descriptor =
              getDescriptor().getMessageTypes().get(9);
            internal_static_SubmitWorkRequestProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_SubmitWorkRequestProto_descriptor,
                new java.lang.String[] { "WorkSpec", "WorkSpecSignature", "FragmentNumber", "AttemptNumber", "ContainerIdString", "AmHost", "AmPort", "CredentialsBinary", "FragmentRuntimeInfo", "InitialEventBytes", "InitialEventSignature", });
            internal_static_SubmitWorkResponseProto_descriptor =
              getDescriptor().getMessageTypes().get(10);
            internal_static_SubmitWorkResponseProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_SubmitWorkResponseProto_descriptor,
                new java.lang.String[] { "SubmissionState", });
            internal_static_SourceStateUpdatedRequestProto_descriptor =
              getDescriptor().getMessageTypes().get(11);
            internal_static_SourceStateUpdatedRequestProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_SourceStateUpdatedRequestProto_descriptor,
                new java.lang.String[] { "QueryIdentifier", "SrcName", "State", });
            internal_static_SourceStateUpdatedResponseProto_descriptor =
              getDescriptor().getMessageTypes().get(12);
            internal_static_SourceStateUpdatedResponseProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_SourceStateUpdatedResponseProto_descriptor,
                new java.lang.String[] { });
            internal_static_QueryCompleteRequestProto_descriptor =
              getDescriptor().getMessageTypes().get(13);
            internal_static_QueryCompleteRequestProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_QueryCompleteRequestProto_descriptor,
                new java.lang.String[] { "QueryIdentifier", "DeleteDelay", });
            internal_static_QueryCompleteResponseProto_descriptor =
              getDescriptor().getMessageTypes().get(14);
            internal_static_QueryCompleteResponseProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_QueryCompleteResponseProto_descriptor,
                new java.lang.String[] { });
            internal_static_TerminateFragmentRequestProto_descriptor =
              getDescriptor().getMessageTypes().get(15);
            internal_static_TerminateFragmentRequestProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_TerminateFragmentRequestProto_descriptor,
                new java.lang.String[] { "QueryIdentifier", "FragmentIdentifierString", });
            internal_static_TerminateFragmentResponseProto_descriptor =
              getDescriptor().getMessageTypes().get(16);
            internal_static_TerminateFragmentResponseProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_TerminateFragmentResponseProto_descriptor,
                new java.lang.String[] { });
            internal_static_GetTokenRequestProto_descriptor =
              getDescriptor().getMessageTypes().get(17);
            internal_static_GetTokenRequestProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_GetTokenRequestProto_descriptor,
                new java.lang.String[] { "AppId", });
            internal_static_GetTokenResponseProto_descriptor =
              getDescriptor().getMessageTypes().get(18);
            internal_static_GetTokenResponseProto_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_GetTokenResponseProto_descriptor,
                new java.lang.String[] { "Token", });
            internal_static_LlapOutputSocketInitMessage_descriptor =
              getDescriptor().getMessageTypes().get(19);
            internal_static_LlapOutputSocketInitMessage_fieldAccessorTable = new
              com.google.protobuf.GeneratedMessage.FieldAccessorTable(
                internal_static_LlapOutputSocketInitMessage_descriptor,
                new java.lang.String[] { "FragmentId", "Token", });
            // No extensions to register for this file.
            return null;
          }
        };
    // Build the descriptor now; the proto imports no other files, hence the
    // empty dependency array.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
| |
| // @@protoc_insertion_point(outer_class_scope) |
| } |