| // Generated by the protocol buffer compiler. DO NOT EDIT! |
| // source: ColumnAggregationWithErrorsProtocol.proto |
| |
| package org.apache.hadoop.hbase.coprocessor.protobuf.generated; |
| |
| public final class ColumnAggregationWithErrorsProtos { |
  private ColumnAggregationWithErrorsProtos() {} // static holder class; never instantiated
  /**
   * Registers all proto extensions declared in this file with the given
   * registry. This file declares no extensions, so the method is a no-op;
   * it exists because the generated-code contract always emits it.
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Accessor contract shared by the immutable
   * {@code ColumnAggregationWithErrorsSumRequest} message and its Builder.
   * Exposes read-only views of the request's column family and (optional)
   * qualifier.
   */
  public interface ColumnAggregationWithErrorsSumRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes family = 1;
    /**
     * <code>required bytes family = 1;</code>
     */
    boolean hasFamily();
    /**
     * <code>required bytes family = 1;</code>
     */
    com.google.protobuf.ByteString getFamily();

    // optional bytes qualifier = 2;
    /**
     * <code>optional bytes qualifier = 2;</code>
     */
    boolean hasQualifier();
    /**
     * <code>optional bytes qualifier = 2;</code>
     */
    com.google.protobuf.ByteString getQualifier();
  }
| /** |
| * Protobuf type {@code ColumnAggregationWithErrorsSumRequest} |
| * |
| * <pre> |
| * use unique names for messages in ColumnAggregationXXX.protos due to a bug in |
| * protoc or hadoop's protoc compiler. |
| * </pre> |
| */ |
  public static final class ColumnAggregationWithErrorsSumRequest extends
      com.google.protobuf.GeneratedMessage
      implements ColumnAggregationWithErrorsSumRequestOrBuilder {
    // Use ColumnAggregationWithErrorsSumRequest.newBuilder() to construct.
    private ColumnAggregationWithErrorsSumRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Lightweight constructor used only to create the singleton default instance.
    private ColumnAggregationWithErrorsSumRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; created and initialized in the static block at
    // the bottom of this class.
    private static final ColumnAggregationWithErrorsSumRequest defaultInstance;
    public static ColumnAggregationWithErrorsSumRequest getDefaultInstance() {
      return defaultInstance;
    }

    public ColumnAggregationWithErrorsSumRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields seen on the wire that are not defined in the .proto schema; kept so
    // re-serialization round-trips them instead of dropping them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: consumes tag/value pairs until end of
    // input (tag 0). Invoked via PARSER below.
    private ColumnAggregationWithErrorsSumRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the default label precedes the field cases here; this is
          // harmless because every case ends with a break (no fall-through).
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // tag 10 = field 1 (family), wire type 2 (length-delimited)
              bitField0_ |= 0x00000001;
              family_ = input.readBytes();
              break;
            }
            case 18: {
              // tag 18 = field 2 (qualifier), wire type 2 (length-delimited)
              bitField0_ |= 0x00000002;
              qualifier_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever was parsed, even on error, so the partial
        // message carried by setUnfinishedMessage() is self-consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.Builder.class);
    }

    // Shared stateless parser; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ColumnAggregationWithErrorsSumRequest> PARSER =
        new com.google.protobuf.AbstractParser<ColumnAggregationWithErrorsSumRequest>() {
      public ColumnAggregationWithErrorsSumRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnAggregationWithErrorsSumRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ColumnAggregationWithErrorsSumRequest> getParserForType() {
      return PARSER;
    }

    // Presence bitmap: bit 0 = family set, bit 1 = qualifier set.
    private int bitField0_;
    // required bytes family = 1;
    public static final int FAMILY_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString family_;
    /**
     * <code>required bytes family = 1;</code>
     */
    public boolean hasFamily() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes family = 1;</code>
     */
    public com.google.protobuf.ByteString getFamily() {
      return family_;
    }

    // optional bytes qualifier = 2;
    public static final int QUALIFIER_FIELD_NUMBER = 2;
    private com.google.protobuf.ByteString qualifier_;
    /**
     * <code>optional bytes qualifier = 2;</code>
     */
    public boolean hasQualifier() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bytes qualifier = 2;</code>
     */
    public com.google.protobuf.ByteString getQualifier() {
      return qualifier_;
    }

    // Resets both fields to their proto defaults (empty byte strings).
    private void initFields() {
      family_ = com.google.protobuf.ByteString.EMPTY;
      qualifier_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized isInitialized() result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The only required field is family.
      if (!hasFamily()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, family_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, qualifier_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 until first computed. Safe because the
    // message is immutable once built.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, family_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, qualifier_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over field presence, field contents, and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) obj;

      boolean result = true;
      result = result && (hasFamily() == other.hasFamily());
      if (hasFamily()) {
        result = result && getFamily()
            .equals(other.getFamily());
      }
      result = result && (hasQualifier() == other.hasQualifier());
      if (hasQualifier()) {
        result = result && getQualifier()
            .equals(other.getQualifier());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed" (recomputed if hash happens to be 0).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasFamily()) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamily().hashCode();
      }
      if (hasQualifier()) {
        hash = (37 * hash) + QUALIFIER_FIELD_NUMBER;
        hash = (53 * hash) + getQualifier().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ColumnAggregationWithErrorsSumRequest}
     *
     * <pre>
     * use unique names for messages in ColumnAggregationXXX.protos due to a bug in
     * protoc or hadoop's protoc compiler.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No-op for this message: it has no sub-message fields, so there are no
      // nested field builders to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets both fields to defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        family_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        qualifier_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumRequest_descriptor;
      }

      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance();
      }

      // Like buildPartial(), but rejects messages missing required fields.
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest build() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the builder's fields and presence bits into a new message
      // without checking required-field initialization.
      public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest buildPartial() {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.family_ = family_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.qualifier_ = qualifier_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) {
          return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Field-wise merge: only fields present in 'other' overwrite this builder.
      public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest other) {
        if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance()) return this;
        if (other.hasFamily()) {
          setFamily(other.getFamily());
        }
        if (other.hasQualifier()) {
          setQualifier(other.getQualifier());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasFamily()) {
          
          return false;
        }
        return true;
      }

      // Parses from the stream, merging whatever was successfully read even if
      // parsing fails part-way (see the finally block).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Presence bitmap mirroring the message's: bit 0 = family, bit 1 = qualifier.
      private int bitField0_;

      // required bytes family = 1;
      private com.google.protobuf.ByteString family_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes family = 1;</code>
       */
      public boolean hasFamily() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public com.google.protobuf.ByteString getFamily() {
        return family_;
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public Builder setFamily(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        family_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes family = 1;</code>
       */
      public Builder clearFamily() {
        bitField0_ = (bitField0_ & ~0x00000001);
        family_ = getDefaultInstance().getFamily();
        onChanged();
        return this;
      }

      // optional bytes qualifier = 2;
      private com.google.protobuf.ByteString qualifier_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes qualifier = 2;</code>
       */
      public boolean hasQualifier() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional bytes qualifier = 2;</code>
       */
      public com.google.protobuf.ByteString getQualifier() {
        return qualifier_;
      }
      /**
       * <code>optional bytes qualifier = 2;</code>
       */
      public Builder setQualifier(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        qualifier_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes qualifier = 2;</code>
       */
      public Builder clearQualifier() {
        bitField0_ = (bitField0_ & ~0x00000002);
        qualifier_ = getDefaultInstance().getQualifier();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ColumnAggregationWithErrorsSumRequest)
    }

    static {
      defaultInstance = new ColumnAggregationWithErrorsSumRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ColumnAggregationWithErrorsSumRequest)
  }
| |
  /**
   * Accessor contract shared by the immutable
   * {@code ColumnAggregationWithErrorsSumResponse} message and its Builder.
   * Exposes the aggregated sum returned by the coprocessor.
   */
  public interface ColumnAggregationWithErrorsSumResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int64 sum = 1;
    /**
     * <code>required int64 sum = 1;</code>
     */
    boolean hasSum();
    /**
     * <code>required int64 sum = 1;</code>
     */
    long getSum();
  }
| /** |
| * Protobuf type {@code ColumnAggregationWithErrorsSumResponse} |
| */ |
| public static final class ColumnAggregationWithErrorsSumResponse extends |
| com.google.protobuf.GeneratedMessage |
| implements ColumnAggregationWithErrorsSumResponseOrBuilder { |
| // Use ColumnAggregationWithErrorsSumResponse.newBuilder() to construct. |
| private ColumnAggregationWithErrorsSumResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { |
| super(builder); |
| this.unknownFields = builder.getUnknownFields(); |
| } |
| private ColumnAggregationWithErrorsSumResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } |
| |
| private static final ColumnAggregationWithErrorsSumResponse defaultInstance; |
| public static ColumnAggregationWithErrorsSumResponse getDefaultInstance() { |
| return defaultInstance; |
| } |
| |
| public ColumnAggregationWithErrorsSumResponse getDefaultInstanceForType() { |
| return defaultInstance; |
| } |
| |
| private final com.google.protobuf.UnknownFieldSet unknownFields; |
| @java.lang.Override |
| public final com.google.protobuf.UnknownFieldSet |
| getUnknownFields() { |
| return this.unknownFields; |
| } |
| private ColumnAggregationWithErrorsSumResponse( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| initFields(); |
| int mutable_bitField0_ = 0; |
| com.google.protobuf.UnknownFieldSet.Builder unknownFields = |
| com.google.protobuf.UnknownFieldSet.newBuilder(); |
| try { |
| boolean done = false; |
| while (!done) { |
| int tag = input.readTag(); |
| switch (tag) { |
| case 0: |
| done = true; |
| break; |
| default: { |
| if (!parseUnknownField(input, unknownFields, |
| extensionRegistry, tag)) { |
| done = true; |
| } |
| break; |
| } |
| case 8: { |
| bitField0_ |= 0x00000001; |
| sum_ = input.readInt64(); |
| break; |
| } |
| } |
| } |
| } catch (com.google.protobuf.InvalidProtocolBufferException e) { |
| throw e.setUnfinishedMessage(this); |
| } catch (java.io.IOException e) { |
| throw new com.google.protobuf.InvalidProtocolBufferException( |
| e.getMessage()).setUnfinishedMessage(this); |
| } finally { |
| this.unknownFields = unknownFields.build(); |
| makeExtensionsImmutable(); |
| } |
| } |
| public static final com.google.protobuf.Descriptors.Descriptor |
| getDescriptor() { |
| return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor; |
| } |
| |
| protected com.google.protobuf.GeneratedMessage.FieldAccessorTable |
| internalGetFieldAccessorTable() { |
| return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable |
| .ensureFieldAccessorsInitialized( |
| org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.Builder.class); |
| } |
| |
| public static com.google.protobuf.Parser<ColumnAggregationWithErrorsSumResponse> PARSER = |
| new com.google.protobuf.AbstractParser<ColumnAggregationWithErrorsSumResponse>() { |
| public ColumnAggregationWithErrorsSumResponse parsePartialFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return new ColumnAggregationWithErrorsSumResponse(input, extensionRegistry); |
| } |
| }; |
| |
| @java.lang.Override |
| public com.google.protobuf.Parser<ColumnAggregationWithErrorsSumResponse> getParserForType() { |
| return PARSER; |
| } |
| |
| private int bitField0_; |
| // required int64 sum = 1; |
| public static final int SUM_FIELD_NUMBER = 1; |
| private long sum_; |
| /** |
| * <code>required int64 sum = 1;</code> |
| */ |
| public boolean hasSum() { |
| return ((bitField0_ & 0x00000001) == 0x00000001); |
| } |
| /** |
| * <code>required int64 sum = 1;</code> |
| */ |
| public long getSum() { |
| return sum_; |
| } |
| |
| private void initFields() { |
| sum_ = 0L; |
| } |
| private byte memoizedIsInitialized = -1; |
| public final boolean isInitialized() { |
| byte isInitialized = memoizedIsInitialized; |
| if (isInitialized != -1) return isInitialized == 1; |
| |
| if (!hasSum()) { |
| memoizedIsInitialized = 0; |
| return false; |
| } |
| memoizedIsInitialized = 1; |
| return true; |
| } |
| |
| public void writeTo(com.google.protobuf.CodedOutputStream output) |
| throws java.io.IOException { |
| getSerializedSize(); |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| output.writeInt64(1, sum_); |
| } |
| getUnknownFields().writeTo(output); |
| } |
| |
| private int memoizedSerializedSize = -1; |
| public int getSerializedSize() { |
| int size = memoizedSerializedSize; |
| if (size != -1) return size; |
| |
| size = 0; |
| if (((bitField0_ & 0x00000001) == 0x00000001)) { |
| size += com.google.protobuf.CodedOutputStream |
| .computeInt64Size(1, sum_); |
| } |
| size += getUnknownFields().getSerializedSize(); |
| memoizedSerializedSize = size; |
| return size; |
| } |
| |
| private static final long serialVersionUID = 0L; |
| @java.lang.Override |
| protected java.lang.Object writeReplace() |
| throws java.io.ObjectStreamException { |
| return super.writeReplace(); |
| } |
| |
| @java.lang.Override |
| public boolean equals(final java.lang.Object obj) { |
| if (obj == this) { |
| return true; |
| } |
| if (!(obj instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse)) { |
| return super.equals(obj); |
| } |
| org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse other = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) obj; |
| |
| boolean result = true; |
| result = result && (hasSum() == other.hasSum()); |
| if (hasSum()) { |
| result = result && (getSum() |
| == other.getSum()); |
| } |
| result = result && |
| getUnknownFields().equals(other.getUnknownFields()); |
| return result; |
| } |
| |
| private int memoizedHashCode = 0; |
| @java.lang.Override |
| public int hashCode() { |
| if (memoizedHashCode != 0) { |
| return memoizedHashCode; |
| } |
| int hash = 41; |
| hash = (19 * hash) + getDescriptorForType().hashCode(); |
| if (hasSum()) { |
| hash = (37 * hash) + SUM_FIELD_NUMBER; |
| hash = (53 * hash) + hashLong(getSum()); |
| } |
| hash = (29 * hash) + getUnknownFields().hashCode(); |
| memoizedHashCode = hash; |
| return hash; |
| } |
| |
| public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( |
| com.google.protobuf.ByteString data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( |
| com.google.protobuf.ByteString data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
| public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(byte[] data) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data); |
| } |
| public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom( |
| byte[] data, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws com.google.protobuf.InvalidProtocolBufferException { |
| return PARSER.parseFrom(data, extensionRegistry); |
| } |
// All parse* overloads below delegate to this message's generated PARSER.
// NOTE(review): generated file ("DO NOT EDIT") — change the .proto and rerun
// protoc instead of hand-editing these methods.

/** Parses one message from the remaining contents of {@code input}. */
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
/** Parses one message from {@code input}, resolving extensions against {@code extensionRegistry}. */
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
/** Parses one delimited message from {@code input} (see {@code Parser.parseDelimitedFrom}). */
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
/** Delimited variant that also resolves extensions against {@code extensionRegistry}. */
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
/** Parses one message from an already-constructed {@code CodedInputStream}. */
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
/** CodedInputStream variant that also resolves extensions against {@code extensionRegistry}. */
public static org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
| |
/** Returns a new, empty builder for this message type. */
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
/** Returns a new builder pre-populated with the fields of {@code prototype}. */
public static Builder newBuilder(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse prototype) {
  return newBuilder().mergeFrom(prototype);
}
/** Returns a new builder pre-populated with this message's fields. */
public Builder toBuilder() { return newBuilder(this); }

// Runtime hook: called by GeneratedMessage to create a builder attached to
// the given parent (used internally by the protobuf library).
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
| /** |
| * Protobuf type {@code ColumnAggregationWithErrorsSumResponse} |
| */ |
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponseOrBuilder {
  // Message descriptor, used by the protobuf reflection API.
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor;
  }

  // Maps descriptor fields to the generated accessor methods ("Sum").
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class, org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  // This message has no nested-message fields, so there are no field builders
  // to force-create; the conditional body is intentionally empty.
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    }
  }
  private static Builder create() {
    return new Builder();
  }

  // Resets sum_ to its default (0) and clears its has-bit.
  public Builder clear() {
    super.clear();
    sum_ = 0L;
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.internal_static_ColumnAggregationWithErrorsSumResponse_descriptor;
  }

  public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance();
  }

  // Builds and validates; throws UninitializedMessageException via
  // newUninitializedMessageException if the required 'sum' field is unset.
  public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse build() {
    org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  // Builds without enforcing required fields; copies sum_ and its has-bit
  // (bit 0x00000001) into the new message.
  public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse buildPartial() {
    org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse result = new org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    result.sum_ = sum_;
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  // Dynamic dispatch: narrows to the typed overload when possible, otherwise
  // falls back to reflection-based merging in the superclass.
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) {
      return mergeFrom((org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Copies set fields (and unknown fields) from 'other' into this builder;
  // merging the default instance is a no-op.
  public Builder mergeFrom(org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse other) {
    if (other == org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance()) return this;
    if (other.hasSum()) {
      setSum(other.getSum());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  // 'sum' is declared required in the .proto, so the message is only
  // initialized once it has been set.
  public final boolean isInitialized() {
    if (!hasSum()) {

      return false;
    }
    return true;
  }

  // Parses from the stream; on InvalidProtocolBufferException the partially
  // parsed message is still merged (in the finally block) before rethrowing.
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  // Tracks which optional/required fields have been explicitly set
  // (bit 0x00000001 = 'sum').
  private int bitField0_;

  // required int64 sum = 1;
  private long sum_ ;
  /**
   * <code>required int64 sum = 1;</code>
   */
  public boolean hasSum() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required int64 sum = 1;</code>
   */
  public long getSum() {
    return sum_;
  }
  /**
   * <code>required int64 sum = 1;</code>
   */
  public Builder setSum(long value) {
    bitField0_ |= 0x00000001;
    sum_ = value;
    onChanged();
    return this;
  }
  /**
   * <code>required int64 sum = 1;</code>
   */
  public Builder clearSum() {
    bitField0_ = (bitField0_ & ~0x00000001);
    sum_ = 0L;
    onChanged();
    return this;
  }

  // @@protoc_insertion_point(builder_scope:ColumnAggregationWithErrorsSumResponse)
}
| |
// Eagerly creates the shared singleton default instance and initializes its
// field defaults via initFields().
static {
  defaultInstance = new ColumnAggregationWithErrorsSumResponse(true);
  defaultInstance.initFields();
}
| |
| // @@protoc_insertion_point(class_scope:ColumnAggregationWithErrorsSumResponse) |
| } |
| |
| /** |
| * Protobuf service {@code ColumnAggregationServiceWithErrors} |
| */ |
public static abstract class ColumnAggregationServiceWithErrors
    implements com.google.protobuf.Service {
  protected ColumnAggregationServiceWithErrors() {}

  // Mirror of the service's abstract methods, for use with
  // newReflectiveService when subclassing is inconvenient.
  public interface Interface {
    /**
     * <code>rpc sum(.ColumnAggregationWithErrorsSumRequest) returns (.ColumnAggregationWithErrorsSumResponse);</code>
     */
    public abstract void sum(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse> done);

  }

  // Wraps an Interface implementation in a full Service that forwards each
  // call to the corresponding Interface method.
  public static com.google.protobuf.Service newReflectiveService(
      final Interface impl) {
    return new ColumnAggregationServiceWithErrors() {
      @java.lang.Override
      public void sum(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse> done) {
        impl.sum(controller, request, done);
      }

    };
  }

  // Wraps a BlockingInterface implementation in a BlockingService that
  // dispatches by method descriptor index (this service has a single method,
  // 'sum', at index 0).
  public static com.google.protobuf.BlockingService
      newReflectiveBlockingService(final BlockingInterface impl) {
    return new com.google.protobuf.BlockingService() {
      public final com.google.protobuf.Descriptors.ServiceDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }

      public final com.google.protobuf.Message callBlockingMethod(
          com.google.protobuf.Descriptors.MethodDescriptor method,
          com.google.protobuf.RpcController controller,
          com.google.protobuf.Message request)
          throws com.google.protobuf.ServiceException {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.callBlockingMethod() given method descriptor for " +
            "wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return impl.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)request);
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }

      public final com.google.protobuf.Message
          getRequestPrototype(
          com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getRequestPrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }

      public final com.google.protobuf.Message
          getResponsePrototype(
          com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getResponsePrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }

    };
  }

  /**
   * <code>rpc sum(.ColumnAggregationWithErrorsSumRequest) returns (.ColumnAggregationWithErrorsSumResponse);</code>
   */
  public abstract void sum(
      com.google.protobuf.RpcController controller,
      org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request,
      com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse> done);

  // Service descriptor: the first (and only) service declared in the
  // generated file descriptor.
  public static final
      com.google.protobuf.Descriptors.ServiceDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.getDescriptor().getServices().get(0);
  }
  public final com.google.protobuf.Descriptors.ServiceDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }

  // Generic (callback-style) dispatch by method descriptor index; delegates
  // to the abstract sum() after specializing the callback type.
  public final void callMethod(
      com.google.protobuf.Descriptors.MethodDescriptor method,
      com.google.protobuf.RpcController controller,
      com.google.protobuf.Message request,
      com.google.protobuf.RpcCallback<
        com.google.protobuf.Message> done) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.callMethod() given method descriptor for wrong " +
        "service type.");
    }
    switch(method.getIndex()) {
      case 0:
        this.sum(controller, (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest)request,
          com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse>specializeCallback(
            done));
        return;
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }

  public final com.google.protobuf.Message
      getRequestPrototype(
      com.google.protobuf.Descriptors.MethodDescriptor method) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
    }
    switch(method.getIndex()) {
      case 0:
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest.getDefaultInstance();
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }

  public final com.google.protobuf.Message
      getResponsePrototype(
      com.google.protobuf.Descriptors.MethodDescriptor method) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.getResponsePrototype() given method " +
        "descriptor for wrong service type.");
    }
    switch(method.getIndex()) {
      case 0:
        return org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance();
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }

  public static Stub newStub(
      com.google.protobuf.RpcChannel channel) {
    return new Stub(channel);
  }

  // Async client stub: forwards sum() over the supplied RpcChannel.
  public static final class Stub extends org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationServiceWithErrors implements Interface {
    private Stub(com.google.protobuf.RpcChannel channel) {
      this.channel = channel;
    }

    private final com.google.protobuf.RpcChannel channel;

    public com.google.protobuf.RpcChannel getChannel() {
      return channel;
    }

    public void sum(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse> done) {
      channel.callMethod(
        getDescriptor().getMethods().get(0),
        controller,
        request,
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance(),
        com.google.protobuf.RpcUtil.generalizeCallback(
          done,
          org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.class,
          org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance()));
    }
  }

  public static BlockingInterface newBlockingStub(
      com.google.protobuf.BlockingRpcChannel channel) {
    return new BlockingStub(channel);
  }

  // Synchronous variant of the service contract.
  public interface BlockingInterface {
    public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse sum(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request)
        throws com.google.protobuf.ServiceException;
  }

  // Blocking client stub: forwards sum() over the supplied BlockingRpcChannel.
  private static final class BlockingStub implements BlockingInterface {
    private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
      this.channel = channel;
    }

    private final com.google.protobuf.BlockingRpcChannel channel;

    public org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse sum(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumRequest request)
        throws com.google.protobuf.ServiceException {
      return (org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse) channel.callBlockingMethod(
        getDescriptor().getMethods().get(0),
        controller,
        request,
        org.apache.hadoop.hbase.coprocessor.protobuf.generated.ColumnAggregationWithErrorsProtos.ColumnAggregationWithErrorsSumResponse.getDefaultInstance());
    }

  }

  // @@protoc_insertion_point(class_scope:ColumnAggregationServiceWithErrors)
}
| |
// Descriptor and field-accessor-table plumbing; populated once by the static
// initializer below and consumed by the generated message/builder classes.
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_ColumnAggregationWithErrorsSumRequest_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_ColumnAggregationWithErrorsSumResponse_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable;

/** Returns the file descriptor for ColumnAggregationWithErrorsProtocol.proto. */
public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;
// Builds the FileDescriptor from the serialized .proto embedded below, then
// wires up the per-message descriptors and accessor tables via the assigner.
// The escaped string is the compiled descriptor — do not edit it by hand.
static {
  java.lang.String[] descriptorData = {
    "\n)ColumnAggregationWithErrorsProtocol.pr" +
    "oto\"J\n%ColumnAggregationWithErrorsSumReq" +
    "uest\022\016\n\006family\030\001 \002(\014\022\021\n\tqualifier\030\002 \001(\014\"" +
    "5\n&ColumnAggregationWithErrorsSumRespons" +
    "e\022\013\n\003sum\030\001 \002(\0032|\n\"ColumnAggregationServi" +
    "ceWithErrors\022V\n\003sum\022&.ColumnAggregationW" +
    "ithErrorsSumRequest\032\'.ColumnAggregationW" +
    "ithErrorsSumResponseBa\n6org.apache.hadoo" +
    "p.hbase.coprocessor.protobuf.generatedB!" +
    "ColumnAggregationWithErrorsProtos\210\001\001\240\001\001"
  };
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
    new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
      public com.google.protobuf.ExtensionRegistry assignDescriptors(
          com.google.protobuf.Descriptors.FileDescriptor root) {
        descriptor = root;
        internal_static_ColumnAggregationWithErrorsSumRequest_descriptor =
          getDescriptor().getMessageTypes().get(0);
        internal_static_ColumnAggregationWithErrorsSumRequest_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_ColumnAggregationWithErrorsSumRequest_descriptor,
            new java.lang.String[] { "Family", "Qualifier", });
        internal_static_ColumnAggregationWithErrorsSumResponse_descriptor =
          getDescriptor().getMessageTypes().get(1);
        internal_static_ColumnAggregationWithErrorsSumResponse_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_ColumnAggregationWithErrorsSumResponse_descriptor,
            new java.lang.String[] { "Sum", });
        return null;
      }
    };
  com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
}
| |
| // @@protoc_insertion_point(outer_class_scope) |
| } |