| // Generated by the protocol buffer compiler. DO NOT EDIT! |
| // source: ClusterStatus.proto |
| |
| package org.apache.hadoop.hbase.protobuf.generated; |
| |
| public final class ClusterStatusProtos { |
// Static holder class for generated message types; never instantiated.
private ClusterStatusProtos() {}
// ClusterStatus.proto defines no extensions, so this is a no-op; the method
// exists for API uniformity with other generated protobuf outer classes.
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
/**
 * Read-only accessor interface for the {@code RegionState} message,
 * implemented by both the immutable message and its Builder.
 * Generated from ClusterStatus.proto; do not hand-edit.
 */
public interface RegionStateOrBuilder
extends com.google.protobuf.MessageOrBuilder {

// required .RegionInfo regionInfo = 1;
boolean hasRegionInfo();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder();

// required .RegionState.State state = 2;
boolean hasState();
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState();

// optional uint64 stamp = 3;
boolean hasStamp();
long getStamp();
}
/**
 * Protocol buffer message {@code RegionState}: the assignment state of a
 * single HBase region — a required {@code RegionInfo}, a required lifecycle
 * {@code State} enum value, and an optional {@code stamp} timestamp.
 * Generated by protoc from ClusterStatus.proto; logic must not be hand-edited.
 */
public static final class RegionState extends
com.google.protobuf.GeneratedMessage
implements RegionStateOrBuilder {
// Use RegionState.newBuilder() to construct.
private RegionState(Builder builder) {
super(builder);
}
// Used only to create the shared default-instance singleton; its fields are
// populated afterwards by initFields() in the static initializer below.
private RegionState(boolean noInit) {}

private static final RegionState defaultInstance;
public static RegionState getDefaultInstance() {
return defaultInstance;
}

public RegionState getDefaultInstanceForType() {
return defaultInstance;
}

public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable;
}

/**
 * Region lifecycle states. Each constant carries a descriptor index and the
 * wire value (here they coincide, but the generated code keeps them separate).
 */
public enum State
implements com.google.protobuf.ProtocolMessageEnum {
OFFLINE(0, 0),
PENDING_OPEN(1, 1),
OPENING(2, 2),
OPEN(3, 3),
PENDING_CLOSE(4, 4),
CLOSING(5, 5),
CLOSED(6, 6),
SPLITTING(7, 7),
SPLIT(8, 8),
;

public static final int OFFLINE_VALUE = 0;
public static final int PENDING_OPEN_VALUE = 1;
public static final int OPENING_VALUE = 2;
public static final int OPEN_VALUE = 3;
public static final int PENDING_CLOSE_VALUE = 4;
public static final int CLOSING_VALUE = 5;
public static final int CLOSED_VALUE = 6;
public static final int SPLITTING_VALUE = 7;
public static final int SPLIT_VALUE = 8;


public final int getNumber() { return value; }

// Maps a wire value to its enum constant; returns null for unknown values
// (callers route unknowns into the message's unknown-field set).
public static State valueOf(int value) {
switch (value) {
case 0: return OFFLINE;
case 1: return PENDING_OPEN;
case 2: return OPENING;
case 3: return OPEN;
case 4: return PENDING_CLOSE;
case 5: return CLOSING;
case 6: return CLOSED;
case 7: return SPLITTING;
case 8: return SPLIT;
default: return null;
}
}

public static com.google.protobuf.Internal.EnumLiteMap<State>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<State>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<State>() {
public State findValueByNumber(int number) {
return State.valueOf(number);
}
};

public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor().getEnumTypes().get(0);
}

// Ordered by descriptor index; used by valueOf(EnumValueDescriptor).
private static final State[] VALUES = {
OFFLINE, PENDING_OPEN, OPENING, OPEN, PENDING_CLOSE, CLOSING, CLOSED, SPLITTING, SPLIT,
};

public static State valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}

private final int index;
private final int value;

private State(int index, int value) {
this.index = index;
this.value = value;
}

// @@protoc_insertion_point(enum_scope:RegionState.State)
}

// Bit i of bitField0_ records whether field i+1 has been explicitly set
// (presence tracking for has*() methods).
private int bitField0_;
// required .RegionInfo regionInfo = 1;
public static final int REGIONINFO_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_;
public boolean hasRegionInfo() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
return regionInfo_;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
return regionInfo_;
}

// required .RegionState.State state = 2;
public static final int STATE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_;
public boolean hasState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() {
return state_;
}

// optional uint64 stamp = 3;
public static final int STAMP_FIELD_NUMBER = 3;
private long stamp_;
public boolean hasStamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
public long getStamp() {
return stamp_;
}

// Sets every field to its proto default; called once on the singleton
// default instance.
private void initFields() {
regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
stamp_ = 0L;
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;

// Both required fields must be present, and the nested required message
// must itself be fully initialized.
if (!hasRegionInfo()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasState()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegionInfo().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the size cache to be computed before writing (protoc convention).
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, regionInfo_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeEnum(2, state_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeUInt64(3, stamp_);
}
getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, regionInfo_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(2, state_.getNumber());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeUInt64Size(3, stamp_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}

private static final long serialVersionUID = 0L;
// Java serialization is delegated to GeneratedMessage's proto-based
// serialized form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) obj;

// Field-by-field: presence bits must match, and present values must match.
boolean result = true;
result = result && (hasRegionInfo() == other.hasRegionInfo());
if (hasRegionInfo()) {
result = result && getRegionInfo()
.equals(other.getRegionInfo());
}
result = result && (hasState() == other.hasState());
if (hasState()) {
result = result &&
(getState() == other.getState());
}
result = result && (hasStamp() == other.hasStamp());
if (hasStamp()) {
result = result && (getStamp()
== other.getStamp());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}

@java.lang.Override
public int hashCode() {
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasRegionInfo()) {
hash = (37 * hash) + REGIONINFO_FIELD_NUMBER;
hash = (53 * hash) + getRegionInfo().hashCode();
}
if (hasState()) {
hash = (37 * hash) + STATE_FIELD_NUMBER;
hash = (53 * hash) + hashEnum(getState());
}
if (hasStamp()) {
hash = (37 * hash) + STAMP_FIELD_NUMBER;
hash = (53 * hash) + hashLong(getStamp());
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}

// --- Static parse entry points. Protobuf 2.4-era codegen parses by merging
// --- into a fresh Builder, then buildParsed() (throws
// --- InvalidProtocolBufferException if required fields are missing).
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
// Delimited variants return null on clean EOF (no message on the stream).
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
return builder.buildParsed();
} else {
return null;
}
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}

public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 * Mutable builder for {@code RegionState}. Mirrors the message's fields and
 * presence bits; build() validates required fields, buildPartial() does not.
 */
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder>
implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionState_fieldAccessorTable;
}

// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}

private Builder(BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates nested-message field builders when the runtime requires
// it (alwaysUseFieldBuilders is true only in debug/codegen scenarios).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getRegionInfoFieldBuilder();
}
}
private static Builder create() {
return new Builder();
}

// Resets all fields to defaults and clears all presence bits.
public Builder clear() {
super.clear();
if (regionInfoBuilder_ == null) {
regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
} else {
regionInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
bitField0_ = (bitField0_ & ~0x00000002);
stamp_ = 0L;
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}

public Builder clone() {
return create().mergeFrom(buildPartial());
}

public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDescriptor();
}

public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getDefaultInstanceForType() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
}

// Builds and validates; throws UninitializedMessageException (unchecked)
// if required fields are missing.
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState build() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}

// Like build(), but surfaces missing required fields as the checked
// InvalidProtocolBufferException expected by parse paths.
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildParsed()
throws com.google.protobuf.InvalidProtocolBufferException {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(
result).asInvalidProtocolBufferException();
}
return result;
}

// Copies builder state into a new message without validation; presence
// bits are translated from the builder's bitField0_ to the message's.
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState buildPartial() {
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
if (regionInfoBuilder_ == null) {
result.regionInfo_ = regionInfo_;
} else {
result.regionInfo_ = regionInfoBuilder_.build();
}
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.state_ = state_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.stamp_ = stamp_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}

public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState) {
return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState)other);
} else {
super.mergeFrom(other);
return this;
}
}

// Merges only fields that are present on 'other' (proto merge semantics).
public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState other) {
if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) return this;
if (other.hasRegionInfo()) {
mergeRegionInfo(other.getRegionInfo());
}
if (other.hasState()) {
setState(other.getState());
}
if (other.hasStamp()) {
setStamp(other.getStamp());
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}

public final boolean isInitialized() {
if (!hasRegionInfo()) {

return false;
}
if (!hasState()) {

return false;
}
if (!getRegionInfo().isInitialized()) {

return false;
}
return true;
}

// Wire-format parse loop. Case ordering (0, default, then numbered tags)
// is protoc's generated layout; every case exits via break/return, so the
// ordering has no behavioral effect. Unknown enum values and unknown tags
// are preserved in the unknown-field set rather than dropped.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder(
this.getUnknownFields());
while (true) {
int tag = input.readTag();
switch (tag) {
case 0:
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
this.setUnknownFields(unknownFields.build());
onChanged();
return this;
}
break;
}
case 10: {
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder();
if (hasRegionInfo()) {
subBuilder.mergeFrom(getRegionInfo());
}
input.readMessage(subBuilder, extensionRegistry);
setRegionInfo(subBuilder.buildPartial());
break;
}
case 16: {
int rawValue = input.readEnum();
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(2, rawValue);
} else {
bitField0_ |= 0x00000002;
state_ = value;
}
break;
}
case 24: {
bitField0_ |= 0x00000004;
stamp_ = input.readUInt64();
break;
}
}
}
}

private int bitField0_;

// required .RegionInfo regionInfo = 1;
// Either regionInfo_ or regionInfoBuilder_ is authoritative: once the
// field builder is created (lazily), regionInfo_ is nulled and all access
// goes through the builder.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
public boolean hasRegionInfo() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
if (regionInfoBuilder_ == null) {
return regionInfo_;
} else {
return regionInfoBuilder_.getMessage();
}
}
public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
regionInfo_ = value;
onChanged();
} else {
regionInfoBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
return this;
}
public Builder setRegionInfo(
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
if (regionInfoBuilder_ == null) {
regionInfo_ = builderForValue.build();
onChanged();
} else {
regionInfoBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
return this;
}
// Merge semantics: if a value is already set (and non-default), field-wise
// merge the new value into it; otherwise replace outright.
public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
if (regionInfoBuilder_ == null) {
if (((bitField0_ & 0x00000001) == 0x00000001) &&
regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
regionInfo_ =
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial();
} else {
regionInfo_ = value;
}
onChanged();
} else {
regionInfoBuilder_.mergeFrom(value);
}
bitField0_ |= 0x00000001;
return this;
}
public Builder clearRegionInfo() {
if (regionInfoBuilder_ == null) {
regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
onChanged();
} else {
regionInfoBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}
// Returns a mutable sub-builder; marks the field as set because the caller
// is presumed to be populating it.
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getRegionInfoFieldBuilder().getBuilder();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
if (regionInfoBuilder_ != null) {
return regionInfoBuilder_.getMessageOrBuilder();
} else {
return regionInfo_;
}
}
// Lazily creates the SingleFieldBuilder and hands ownership of the current
// value to it (regionInfo_ is nulled afterwards).
private com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
getRegionInfoFieldBuilder() {
if (regionInfoBuilder_ == null) {
regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
regionInfo_,
getParentForChildren(),
isClean());
regionInfo_ = null;
}
return regionInfoBuilder_;
}

// required .RegionState.State state = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
public boolean hasState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() {
return state_;
}
public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
state_ = value;
onChanged();
return this;
}
public Builder clearState() {
bitField0_ = (bitField0_ & ~0x00000002);
state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
onChanged();
return this;
}

// optional uint64 stamp = 3;
private long stamp_ ;
public boolean hasStamp() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
public long getStamp() {
return stamp_;
}
public Builder setStamp(long value) {
bitField0_ |= 0x00000004;
stamp_ = value;
onChanged();
return this;
}
public Builder clearStamp() {
bitField0_ = (bitField0_ & ~0x00000004);
stamp_ = 0L;
onChanged();
return this;
}

// @@protoc_insertion_point(builder_scope:RegionState)
}

static {
defaultInstance = new RegionState(true);
defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:RegionState)
}
| |
/**
 * Read-only accessor interface for the {@code RegionInTransition} message,
 * implemented by both the immutable message and its Builder.
 * Generated from ClusterStatus.proto; do not hand-edit.
 */
public interface RegionInTransitionOrBuilder
extends com.google.protobuf.MessageOrBuilder {

// required .RegionSpecifier spec = 1;
boolean hasSpec();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec();
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder();

// required .RegionState regionState = 2;
boolean hasRegionState();
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState();
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder();
}
| public static final class RegionInTransition extends |
| com.google.protobuf.GeneratedMessage |
| implements RegionInTransitionOrBuilder { |
// Use RegionInTransition.newBuilder() to construct.
private RegionInTransition(Builder builder) {
super(builder);
}
// Used only to create the shared default-instance singleton; fields are
// populated afterwards by initFields() in the class's static initializer.
private RegionInTransition(boolean noInit) {}
| |
// Singleton default instance, assigned in the class's static initializer.
private static final RegionInTransition defaultInstance;
public static RegionInTransition getDefaultInstance() {
return defaultInstance;
}

public RegionInTransition getDefaultInstanceForType() {
return defaultInstance;
}

public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable;
}
| |
// Bit i of bitField0_ records whether field i+1 has been explicitly set.
private int bitField0_;
// required .RegionSpecifier spec = 1;
public static final int SPEC_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_;
public boolean hasSpec() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() {
return spec_;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() {
return spec_;
}

// required .RegionState regionState = 2;
public static final int REGIONSTATE_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_;
public boolean hasRegionState() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() {
return regionState_;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() {
return regionState_;
}
| |
// Sets every field to its proto default; called once on the singleton.
private void initFields() {
spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
}
// Memoized tri-state: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized != -1) return isInitialized == 1;

// Both required fields must be present and recursively initialized.
if (!hasSpec()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasRegionState()) {
memoizedIsInitialized = 0;
return false;
}
if (!getSpec().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
if (!getRegionState().isInitialized()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
| |
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
// Forces the size cache to be computed before writing (protoc convention).
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeMessage(1, spec_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, regionState_);
}
getUnknownFields().writeTo(output);
}

// Cached serialized size; -1 means not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;

size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, spec_);
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, regionState_);
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
| |
private static final long serialVersionUID = 0L;
// Java serialization delegates to GeneratedMessage's proto-based form.
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
| |
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)) {
return super.equals(obj);
}
org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) obj;

// Field-by-field: presence bits must match, and present values must match.
boolean result = true;
result = result && (hasSpec() == other.hasSpec());
if (hasSpec()) {
result = result && getSpec()
.equals(other.getSpec());
}
result = result && (hasRegionState() == other.hasRegionState());
if (hasRegionState()) {
result = result && getRegionState()
.equals(other.getRegionState());
}
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
}

@java.lang.Override
public int hashCode() {
int hash = 41;
hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasSpec()) {
hash = (37 * hash) + SPEC_FIELD_NUMBER;
hash = (53 * hash) + getSpec().hashCode();
}
if (hasRegionState()) {
hash = (37 * hash) + REGIONSTATE_FIELD_NUMBER;
hash = (53 * hash) + getRegionState().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
| |
// --- Static parse entry points. Protobuf 2.4-era codegen parses by merging
// --- into a fresh Builder, then buildParsed() (throws
// --- InvalidProtocolBufferException if required fields are missing).
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return newBuilder().mergeFrom(data, extensionRegistry)
.buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(java.io.InputStream input)
throws java.io.IOException {
return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return newBuilder().mergeFrom(input, extensionRegistry)
.buildParsed();
}
// Delimited variant returns null on clean EOF (no message on the stream).
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
Builder builder = newBuilder();
if (builder.mergeDelimitedFrom(input)) {
return builder.buildParsed();
} else {
return null;
}
}
| public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseDelimitedFrom( |
| java.io.InputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| Builder builder = newBuilder(); |
| if (builder.mergeDelimitedFrom(input, extensionRegistry)) { |
| return builder.buildParsed(); |
| } else { |
| return null; |
| } |
| } |
| public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( |
| com.google.protobuf.CodedInputStream input) |
| throws java.io.IOException { |
| return newBuilder().mergeFrom(input).buildParsed(); |
| } |
| public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition parseFrom( |
| com.google.protobuf.CodedInputStream input, |
| com.google.protobuf.ExtensionRegistryLite extensionRegistry) |
| throws java.io.IOException { |
| return newBuilder().mergeFrom(input, extensionRegistry) |
| .buildParsed(); |
| } |
| |
// Builder factories: newBuilder() creates an empty builder,
// newBuilder(prototype) creates one pre-populated from an existing
// message, and toBuilder() is the instance-method form of the latter.
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
| |
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  // Framework hook: creates a child builder wired to its parent so
  // nested-builder changes propagate (used by SingleFieldBuilder).
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Builder for {@code RegionInTransition} (fields: required .RegionSpecifier
 * spec = 1, required .RegionState regionState = 2). Field presence is
 * tracked in {@code bitField0_} (bit 0 = spec, bit 1 = regionState).
 * Each message field has both a plain value ({@code spec_}) and a lazily
 * created nested {@code SingleFieldBuilder}; once the nested builder
 * exists it becomes the single source of truth for that field.
 */
public static final class Builder extends
   com.google.protobuf.GeneratedMessage.Builder<Builder>
 implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_RegionInTransition_fieldAccessorTable;
  }

  // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // Eagerly create nested field builders only when the runtime requests
    // it (alwaysUseFieldBuilders); otherwise they are created on demand.
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      getSpecFieldBuilder();
      getRegionStateFieldBuilder();
    }
  }
  private static Builder create() {
    return new Builder();
  }

  public Builder clear() {
    // Reset both fields to their default instances and clear their
    // presence bits, routing through the nested builder when present.
    super.clear();
    if (specBuilder_ == null) {
      spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
    } else {
      specBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    if (regionStateBuilder_ == null) {
      regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
    } else {
      regionStateBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000002);
    return this;
  }

  public Builder clone() {
    // Deep-enough copy: snapshot current state via buildPartial() and
    // merge it into a fresh builder.
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDescriptor();
  }

  public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance();
  }

  public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition build() {
    // Strict build: throws UninitializedMessageException (unchecked) if a
    // required field is missing.
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildParsed()
      throws com.google.protobuf.InvalidProtocolBufferException {
    // Parse-path build: same check as build(), but surfaces missing
    // required fields as a checked InvalidProtocolBufferException.
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(
        result).asInvalidProtocolBufferException();
    }
    return result;
  }

  public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition buildPartial() {
    // Copies builder state into a new message without enforcing required
    // fields. Presence bits are transferred from the builder's
    // bitField0_ to the message's, and each field value is taken from the
    // nested builder when one exists.
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    if (specBuilder_ == null) {
      result.spec_ = spec_;
    } else {
      result.spec_ = specBuilder_.build();
    }
    if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
      to_bitField0_ |= 0x00000002;
    }
    if (regionStateBuilder_ == null) {
      result.regionState_ = regionState_;
    } else {
      result.regionState_ = regionStateBuilder_.build();
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Dispatch to the typed overload when possible; otherwise fall back
    // to reflective field-by-field merging in the base class.
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition other) {
    // Merge semantics: only fields set in `other` overwrite/merge into
    // this builder; merging the default instance is a no-op.
    if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance()) return this;
    if (other.hasSpec()) {
      mergeSpec(other.getSpec());
    }
    if (other.hasRegionState()) {
      mergeRegionState(other.getRegionState());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  public final boolean isInitialized() {
    // Both fields are required, and each must itself be fully initialized.
    if (!hasSpec()) {

      return false;
    }
    if (!hasRegionState()) {

      return false;
    }
    if (!getSpec().isInitialized()) {

      return false;
    }
    if (!getRegionState().isInitialized()) {

      return false;
    }
    return true;
  }

  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    // Wire parsing loop. Tag 0 = end of input; unrecognized tags are
    // preserved in the unknown-field set. Tag 10 = field 1 (spec,
    // length-delimited), tag 18 = field 2 (regionState); a repeated
    // occurrence merges into the previously parsed value.
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder(
        this.getUnknownFields());
    while (true) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          this.setUnknownFields(unknownFields.build());
          onChanged();
          return this;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            this.setUnknownFields(unknownFields.build());
            onChanged();
            return this;
          }
          break;
        }
        case 10: {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
          if (hasSpec()) {
            subBuilder.mergeFrom(getSpec());
          }
          input.readMessage(subBuilder, extensionRegistry);
          setSpec(subBuilder.buildPartial());
          break;
        }
        case 18: {
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder();
          if (hasRegionState()) {
            subBuilder.mergeFrom(getRegionState());
          }
          input.readMessage(subBuilder, extensionRegistry);
          setRegionState(subBuilder.buildPartial());
          break;
        }
      }
    }
  }

  // Presence bits: bit 0 = spec, bit 1 = regionState.
  private int bitField0_;

  // required .RegionSpecifier spec = 1;
  // Plain value used until specBuilder_ is created; after that,
  // specBuilder_ owns the field and spec_ is nulled (see
  // getSpecFieldBuilder()).
  private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
  private com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> specBuilder_;
  public boolean hasSpec() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getSpec() {
    if (specBuilder_ == null) {
      return spec_;
    } else {
      return specBuilder_.getMessage();
    }
  }
  public Builder setSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
    if (specBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      spec_ = value;
      onChanged();
    } else {
      specBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  public Builder setSpec(
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
    if (specBuilder_ == null) {
      spec_ = builderForValue.build();
      onChanged();
    } else {
      specBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  public Builder mergeSpec(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
    // If a non-default value is already present, field-merge into it;
    // otherwise just adopt `value`.
    if (specBuilder_ == null) {
      if (((bitField0_ & 0x00000001) == 0x00000001) &&
          spec_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
        spec_ =
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(spec_).mergeFrom(value).buildPartial();
      } else {
        spec_ = value;
      }
      onChanged();
    } else {
      specBuilder_.mergeFrom(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  public Builder clearSpec() {
    if (specBuilder_ == null) {
      spec_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      onChanged();
    } else {
      specBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getSpecBuilder() {
    // Marks the field present and hands back a mutable nested builder.
    bitField0_ |= 0x00000001;
    onChanged();
    return getSpecFieldBuilder().getBuilder();
  }
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getSpecOrBuilder() {
    if (specBuilder_ != null) {
      return specBuilder_.getMessageOrBuilder();
    } else {
      return spec_;
    }
  }
  private com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
      getSpecFieldBuilder() {
    // Lazily create the nested builder; from then on spec_ is null and
    // the SingleFieldBuilder owns the field's state.
    if (specBuilder_ == null) {
      specBuilder_ = new com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
              spec_,
              getParentForChildren(),
              isClean());
      spec_ = null;
    }
    return specBuilder_;
  }

  // required .RegionState regionState = 2;
  // Mirrors the spec field above: plain value until regionStateBuilder_
  // takes ownership.
  private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
  private com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder> regionStateBuilder_;
  public boolean hasRegionState() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState getRegionState() {
    if (regionStateBuilder_ == null) {
      return regionState_;
    } else {
      return regionStateBuilder_.getMessage();
    }
  }
  public Builder setRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) {
    if (regionStateBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      regionState_ = value;
      onChanged();
    } else {
      regionStateBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    return this;
  }
  public Builder setRegionState(
      org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder builderForValue) {
    if (regionStateBuilder_ == null) {
      regionState_ = builderForValue.build();
      onChanged();
    } else {
      regionStateBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    return this;
  }
  public Builder mergeRegionState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState value) {
    if (regionStateBuilder_ == null) {
      if (((bitField0_ & 0x00000002) == 0x00000002) &&
          regionState_ != org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance()) {
        regionState_ =
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.newBuilder(regionState_).mergeFrom(value).buildPartial();
      } else {
        regionState_ = value;
      }
      onChanged();
    } else {
      regionStateBuilder_.mergeFrom(value);
    }
    bitField0_ |= 0x00000002;
    return this;
  }
  public Builder clearRegionState() {
    if (regionStateBuilder_ == null) {
      regionState_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.getDefaultInstance();
      onChanged();
    } else {
      regionStateBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000002);
    return this;
  }
  public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder getRegionStateBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getRegionStateFieldBuilder().getBuilder();
  }
  public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder getRegionStateOrBuilder() {
    if (regionStateBuilder_ != null) {
      return regionStateBuilder_.getMessageOrBuilder();
    } else {
      return regionState_;
    }
  }
  private com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>
      getRegionStateFieldBuilder() {
    if (regionStateBuilder_ == null) {
      regionStateBuilder_ = new com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionStateOrBuilder>(
              regionState_,
              getParentForChildren(),
              isClean());
      regionState_ = null;
    }
    return regionStateBuilder_;
  }

  // @@protoc_insertion_point(builder_scope:RegionInTransition)
}
| |
static {
  // The shared default instance is created with the lightweight no-init
  // constructor, then its fields are set to their defaults exactly once.
  defaultInstance = new RegionInTransition(true);
  defaultInstance.initFields();
}
| |
| // @@protoc_insertion_point(class_scope:RegionInTransition) |
| } |
| |
/**
 * Read-only accessor interface for {@code LiveServerInfo}, implemented by
 * both the immutable message and its Builder. Mirrors the proto fields:
 * required .ServerName server = 1 and required .ServerLoad serverLoad = 2.
 */
public interface LiveServerInfoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .ServerName server = 1;
  boolean hasServer();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

  // required .ServerLoad serverLoad = 2;
  boolean hasServerLoad();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder();
}
| public static final class LiveServerInfo extends |
| com.google.protobuf.GeneratedMessage |
| implements LiveServerInfoOrBuilder { |
// Use LiveServerInfo.newBuilder() to construct.
private LiveServerInfo(Builder builder) {
  super(builder);
}
// Lightweight constructor used only for the singleton default instance;
// fields are populated afterwards via initFields().
private LiveServerInfo(boolean noInit) {}
| |
// Singleton default instance, assigned once in the class's static
// initializer.
private static final LiveServerInfo defaultInstance;
public static LiveServerInfo getDefaultInstance() {
  return defaultInstance;
}

public LiveServerInfo getDefaultInstanceForType() {
  return defaultInstance;
}
| |
// Descriptor plumbing: ties this class to the LiveServerInfo descriptor
// and reflective field accessors defined at the bottom of the outer file.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable;
}
| |
// Presence bits: bit 0 = server, bit 1 = serverLoad.
private int bitField0_;
// required .ServerName server = 1;
public static final int SERVER_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
public boolean hasServer() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
  return server_;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
  // The immutable message returns the field itself as the OrBuilder view.
  return server_;
}

// required .ServerLoad serverLoad = 2;
public static final int SERVERLOAD_FIELD_NUMBER = 2;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_;
public boolean hasServerLoad() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() {
  return serverLoad_;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() {
  return serverLoad_;
}
| |
// Sets every field to its default instance; called once on the shared
// default instance from the static initializer.
private void initFields() {
  server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
  serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
}
// Memoized initialization check: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  // Both fields are required and must themselves be fully initialized.
  if (!hasServer()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!hasServerLoad()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getServer().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  if (!getServerLoad().isInitialized()) {
    memoizedIsInitialized = 0;
    return false;
  }
  memoizedIsInitialized = 1;
  return true;
}
| |
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // getSerializedSize() is called first so nested message sizes are
  // memoized before writing length-delimited fields.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, server_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeMessage(2, serverLoad_);
  }
  // Unknown fields are round-tripped so data from newer schemas survives.
  getUnknownFields().writeTo(output);
}
| |
// Memoized wire size: -1 = not yet computed.
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  // Only fields whose presence bit is set contribute to the size.
  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, server_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(2, serverLoad_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
| |
// serialVersionUID is fixed at 0; Java serialization is delegated to the
// GeneratedMessage base class via writeReplace().
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}
| |
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Standard generated equals: equal presence bits, equal values when
  // present, and equal unknown-field sets.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) obj;

  boolean result = true;
  result = result && (hasServer() == other.hasServer());
  if (hasServer()) {
    result = result && getServer()
        .equals(other.getServer());
  }
  result = result && (hasServerLoad() == other.hasServerLoad());
  if (hasServerLoad()) {
    result = result && getServerLoad()
        .equals(other.getServerLoad());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
| |
@java.lang.Override
public int hashCode() {
  // Consistent with equals(): mixes descriptor, present fields (tagged by
  // field number), and unknown fields using the fixed protoc multipliers.
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasServer()) {
    hash = (37 * hash) + SERVER_FIELD_NUMBER;
    hash = (53 * hash) + getServer().hashCode();
  }
  if (hasServerLoad()) {
    hash = (37 * hash) + SERVERLOAD_FIELD_NUMBER;
    hash = (53 * hash) + getServerLoad().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  return hash;
}
| |
// ---------------------------------------------------------------------
// Static parse entry points; all funnel through Builder.buildParsed(),
// which reports missing required fields (server, serverLoad) as an
// InvalidProtocolBufferException. The delimited variants read a varint
// length prefix and return null on clean end-of-stream.
// ---------------------------------------------------------------------
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  Builder builder = newBuilder();
  // false means the stream was already at EOF: no message to return.
  if (builder.mergeDelimitedFrom(input)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
           .buildParsed();
}
| |
// Builder factories: empty builder, builder seeded from a prototype, and
// the instance-method form toBuilder().
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
| |
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  // Framework hook: creates a child builder wired to its parent so
  // nested-builder changes propagate upward.
  Builder builder = new Builder(parent);
  return builder;
}
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder { |
// Descriptor plumbing shared with the message class: same descriptor and
// reflective accessor table.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_LiveServerInfo_fieldAccessorTable;
}
| |
// Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
  // Eagerly create nested field builders only when the runtime requests
  // it; otherwise they are created lazily on first access.
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    getServerFieldBuilder();
    getServerLoadFieldBuilder();
  }
}
private static Builder create() {
  return new Builder();
}
| |
public Builder clear() {
  // Reset both fields to their defaults and clear their presence bits
  // (bit 0 = server, bit 1 = serverLoad), routing through the nested
  // builder when one exists.
  super.clear();
  if (serverBuilder_ == null) {
    server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
  } else {
    serverBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  if (serverLoadBuilder_ == null) {
    serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
  } else {
    serverLoadBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000002);
  return this;
}
| |
public Builder clone() {
  // Snapshot current state via buildPartial() and merge into a fresh builder.
  return create().mergeFrom(buildPartial());
}
| |
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDescriptor();
}

public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance();
}
| |
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo build() {
  // Strict build: throws an unchecked UninitializedMessageException if a
  // required field is missing.
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildParsed()
    throws com.google.protobuf.InvalidProtocolBufferException {
  // Parse-path build: missing required fields surface as a checked
  // InvalidProtocolBufferException instead.
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(
      result).asInvalidProtocolBufferException();
  }
  return result;
}
| |
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo buildPartial() {
  // Copies builder state into a new message without enforcing required
  // fields: presence bits are transferred, and each field value is taken
  // from its nested builder when one exists.
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  if (serverBuilder_ == null) {
    result.server_ = server_;
  } else {
    result.server_ = serverBuilder_.build();
  }
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  if (serverLoadBuilder_ == null) {
    result.serverLoad_ = serverLoad_;
  } else {
    result.serverLoad_ = serverLoadBuilder_.build();
  }
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
| |
public Builder mergeFrom(com.google.protobuf.Message other) {
  // Dispatch to the typed overload when possible; otherwise fall back to
  // reflective merging in the base class.
  if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo) {
    return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo other) {
  // Only fields set in `other` merge into this builder; merging the
  // default instance is a no-op.
  if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance()) return this;
  if (other.hasServer()) {
    mergeServer(other.getServer());
  }
  if (other.hasServerLoad()) {
    mergeServerLoad(other.getServerLoad());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}
| |
| public final boolean isInitialized() { |
| if (!hasServer()) { |
| |
| return false; |
| } |
| if (!hasServerLoad()) { |
| |
| return false; |
| } |
| if (!getServer().isInitialized()) { |
| |
| return false; |
| } |
| if (!getServerLoad().isInitialized()) { |
| |
| return false; |
| } |
| return true; |
| } |
| |
// Parses fields from the wire into this builder. Tag 0 means end-of-stream;
// unrecognized tags are preserved in the unknown-field set. Note the
// `default:` label precedes the `case 10`/`case 18` labels — legal Java,
// and exactly the order protoc emits.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
    com.google.protobuf.UnknownFieldSet.newBuilder(
      this.getUnknownFields());
  while (true) {
    int tag = input.readTag();
    switch (tag) {
      case 0:
        // End of input: commit accumulated unknown fields and return.
        this.setUnknownFields(unknownFields.build());
        onChanged();
        return this;
      default: {
        // Unknown tag: stash it; a false return means end-of-group.
        if (!parseUnknownField(input, unknownFields,
                               extensionRegistry, tag)) {
          this.setUnknownFields(unknownFields.build());
          onChanged();
          return this;
        }
        break;
      }
      case 10: {
        // Field 1 (server), wire type 2: merge into any existing value.
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
        if (hasServer()) {
          subBuilder.mergeFrom(getServer());
        }
        input.readMessage(subBuilder, extensionRegistry);
        setServer(subBuilder.buildPartial());
        break;
      }
      case 18: {
        // Field 2 (serverLoad), wire type 2: merge into any existing value.
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder();
        if (hasServerLoad()) {
          subBuilder.mergeFrom(getServerLoad());
        }
        input.readMessage(subBuilder, extensionRegistry);
        setServerLoad(subBuilder.buildPartial());
        break;
      }
    }
  }
}
| |
// Has-bits for this builder: 0x1 = server set, 0x2 = serverLoad set.
private int bitField0_;

// required .ServerName server = 1;
// Either server_ holds the value inline, or (once getServerBuilder() has
// been called) serverBuilder_ owns it and server_ is nulled out.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;
public boolean hasServer() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
  if (serverBuilder_ == null) {
    return server_;
  } else {
    return serverBuilder_.getMessage();
  }
}
// Replaces the field with `value` (rejects null) and sets the has-bit.
public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    server_ = value;
    onChanged();
  } else {
    serverBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
// Convenience overload: builds the given sub-builder and stores the result.
public Builder setServer(
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
  if (serverBuilder_ == null) {
    server_ = builderForValue.build();
    onChanged();
  } else {
    serverBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  return this;
}
// Protobuf merge: if a non-default value already exists, field-wise merge
// `value` into it; otherwise just adopt `value`.
public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
  if (serverBuilder_ == null) {
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
      server_ =
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
    } else {
      server_ = value;
    }
    onChanged();
  } else {
    serverBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}
// Resets the field to its default instance and clears the has-bit.
public Builder clearServer() {
  if (serverBuilder_ == null) {
    server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    onChanged();
  } else {
    serverBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}
// Returns a mutable sub-builder; marks the field present as a side effect
// and switches storage from the inline value to the SingleFieldBuilder.
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getServerFieldBuilder().getBuilder();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
  if (serverBuilder_ != null) {
    return serverBuilder_.getMessageOrBuilder();
  } else {
    return server_;
  }
}
// Lazily creates the nested-builder wrapper, transferring ownership of the
// current inline value into it (server_ is nulled once wrapped).
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
    getServerFieldBuilder() {
  if (serverBuilder_ == null) {
    serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
            server_,
            getParentForChildren(),
            isClean());
    server_ = null;
  }
  return serverBuilder_;
}
| |
// required .ServerLoad serverLoad = 2;
// Mirrors the `server` field accessors exactly, but for field 2 and
// has-bit 0x2: value lives in serverLoad_ until a builder is requested,
// after which serverLoadBuilder_ owns it.
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder> serverLoadBuilder_;
public boolean hasServerLoad() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad getServerLoad() {
  if (serverLoadBuilder_ == null) {
    return serverLoad_;
  } else {
    return serverLoadBuilder_.getMessage();
  }
}
// Replaces the field with `value` (rejects null) and sets the has-bit.
public Builder setServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) {
  if (serverLoadBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    serverLoad_ = value;
    onChanged();
  } else {
    serverLoadBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  return this;
}
// Convenience overload: builds the given sub-builder and stores the result.
public Builder setServerLoad(
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder builderForValue) {
  if (serverLoadBuilder_ == null) {
    serverLoad_ = builderForValue.build();
    onChanged();
  } else {
    serverLoadBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  return this;
}
// Protobuf merge: field-wise merge into an existing non-default value,
// otherwise adopt `value` directly.
public Builder mergeServerLoad(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad value) {
  if (serverLoadBuilder_ == null) {
    if (((bitField0_ & 0x00000002) == 0x00000002) &&
        serverLoad_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance()) {
      serverLoad_ =
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.newBuilder(serverLoad_).mergeFrom(value).buildPartial();
    } else {
      serverLoad_ = value;
    }
    onChanged();
  } else {
    serverLoadBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000002;
  return this;
}
// Resets the field to its default instance and clears the has-bit.
public Builder clearServerLoad() {
  if (serverLoadBuilder_ == null) {
    serverLoad_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.getDefaultInstance();
    onChanged();
  } else {
    serverLoadBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000002);
  return this;
}
// Returns a mutable sub-builder; marks the field present as a side effect.
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder getServerLoadBuilder() {
  bitField0_ |= 0x00000002;
  onChanged();
  return getServerLoadFieldBuilder().getBuilder();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder getServerLoadOrBuilder() {
  if (serverLoadBuilder_ != null) {
    return serverLoadBuilder_.getMessageOrBuilder();
  } else {
    return serverLoad_;
  }
}
// Lazily creates the nested-builder wrapper; serverLoad_ is nulled once
// ownership moves into the SingleFieldBuilder.
private com.google.protobuf.SingleFieldBuilder<
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder>
    getServerLoadFieldBuilder() {
  if (serverLoadBuilder_ == null) {
    serverLoadBuilder_ = new com.google.protobuf.SingleFieldBuilder<
        org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoad.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerLoadOrBuilder>(
            serverLoad_,
            getParentForChildren(),
            isClean());
    serverLoad_ = null;
  }
  return serverLoadBuilder_;
}
| |
| // @@protoc_insertion_point(builder_scope:LiveServerInfo) |
| } |
| |
// Eagerly builds the shared default instance. The (boolean noInit)
// constructor deliberately skips initialization, so initFields() must be
// called here to populate field defaults.
static {
  defaultInstance = new LiveServerInfo(true);
  defaultInstance.initFields();
}
| |
| // @@protoc_insertion_point(class_scope:LiveServerInfo) |
| } |
| |
/**
 * Read-only accessor interface for the {@code ClusterStatus} message,
 * implemented by both the immutable message and its Builder. Every field is
 * optional or repeated, so a default ClusterStatus is valid. Field numbers
 * are noted in the generated comments below.
 */
public interface ClusterStatusOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional .HBaseVersionFileContent hbaseVersion = 1;
  boolean hasHbaseVersion();
  org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion();
  org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder();

  // repeated .LiveServerInfo liveServers = 2;
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo>
      getLiveServersList();
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index);
  int getLiveServersCount();
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder>
      getLiveServersOrBuilderList();
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder(
      int index);

  // repeated .ServerName deadServers = 3;
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>
      getDeadServersList();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index);
  int getDeadServersCount();
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
      getDeadServersOrBuilderList();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder(
      int index);

  // repeated .RegionInTransition regionsInTransition = 4;
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition>
      getRegionsInTransitionList();
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index);
  int getRegionsInTransitionCount();
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder>
      getRegionsInTransitionOrBuilderList();
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder(
      int index);

  // optional .ClusterId clusterId = 5;
  boolean hasClusterId();
  org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId();
  org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder();

  // repeated .Coprocessor masterCoprocessors = 6;
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>
      getMasterCoprocessorsList();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index);
  int getMasterCoprocessorsCount();
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
      getMasterCoprocessorsOrBuilderList();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder(
      int index);

  // optional .ServerName master = 7;
  boolean hasMaster();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder();

  // repeated .ServerName backupMasters = 8;
  java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>
      getBackupMastersList();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index);
  int getBackupMastersCount();
  java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
      getBackupMastersOrBuilderList();
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder(
      int index);

  // optional bool balancerOn = 9;
  boolean hasBalancerOn();
  boolean getBalancerOn();
}
| public static final class ClusterStatus extends |
| com.google.protobuf.GeneratedMessage |
| implements ClusterStatusOrBuilder { |
// Use ClusterStatus.newBuilder() to construct.
private ClusterStatus(Builder builder) {
  super(builder);
}
// Special constructor for the singleton default instance: skips field
// initialization (the class static initializer calls initFields() on it).
private ClusterStatus(boolean noInit) {}

private static final ClusterStatus defaultInstance;
public static ClusterStatus getDefaultInstance() {
  return defaultInstance;
}

public ClusterStatus getDefaultInstanceForType() {
  return defaultInstance;
}

// Descriptor/field-accessor plumbing used by the protobuf reflection API;
// the static tables are initialized in the outer class.
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable;
}
| |
// Has-bits for the optional singular fields: 0x1 = hbaseVersion,
// 0x2 = clusterId, 0x4 = master, 0x8 = balancerOn. Repeated fields have no
// has-bit; presence is just list non-emptiness.
private int bitField0_;
// optional .HBaseVersionFileContent hbaseVersion = 1;
public static final int HBASEVERSION_FIELD_NUMBER = 1;
private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_;
public boolean hasHbaseVersion() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() {
  return hbaseVersion_;
}
public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() {
  return hbaseVersion_;
}

// repeated .LiveServerInfo liveServers = 2;
public static final int LIVESERVERS_FIELD_NUMBER = 2;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> liveServers_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> getLiveServersList() {
  return liveServers_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder>
    getLiveServersOrBuilderList() {
  return liveServers_;
}
public int getLiveServersCount() {
  return liveServers_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) {
  return liveServers_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder(
    int index) {
  return liveServers_.get(index);
}

// repeated .ServerName deadServers = 3;
public static final int DEADSERVERS_FIELD_NUMBER = 3;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> deadServers_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getDeadServersList() {
  return deadServers_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
    getDeadServersOrBuilderList() {
  return deadServers_;
}
public int getDeadServersCount() {
  return deadServers_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) {
  return deadServers_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder(
    int index) {
  return deadServers_.get(index);
}

// repeated .RegionInTransition regionsInTransition = 4;
public static final int REGIONSINTRANSITION_FIELD_NUMBER = 4;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> regionsInTransition_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> getRegionsInTransitionList() {
  return regionsInTransition_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder>
    getRegionsInTransitionOrBuilderList() {
  return regionsInTransition_;
}
public int getRegionsInTransitionCount() {
  return regionsInTransition_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) {
  return regionsInTransition_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder(
    int index) {
  return regionsInTransition_.get(index);
}

// optional .ClusterId clusterId = 5;
public static final int CLUSTERID_FIELD_NUMBER = 5;
private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_;
public boolean hasClusterId() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() {
  return clusterId_;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() {
  return clusterId_;
}

// repeated .Coprocessor masterCoprocessors = 6;
public static final int MASTERCOPROCESSORS_FIELD_NUMBER = 6;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> masterCoprocessors_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getMasterCoprocessorsList() {
  return masterCoprocessors_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>
    getMasterCoprocessorsOrBuilderList() {
  return masterCoprocessors_;
}
public int getMasterCoprocessorsCount() {
  return masterCoprocessors_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) {
  return masterCoprocessors_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder(
    int index) {
  return masterCoprocessors_.get(index);
}

// optional .ServerName master = 7;
public static final int MASTER_FIELD_NUMBER = 7;
private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_;
public boolean hasMaster() {
  return ((bitField0_ & 0x00000004) == 0x00000004);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() {
  return master_;
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() {
  return master_;
}

// repeated .ServerName backupMasters = 8;
public static final int BACKUPMASTERS_FIELD_NUMBER = 8;
private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> backupMasters_;
public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getBackupMastersList() {
  return backupMasters_;
}
public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
    getBackupMastersOrBuilderList() {
  return backupMasters_;
}
public int getBackupMastersCount() {
  return backupMasters_.size();
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) {
  return backupMasters_.get(index);
}
public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder(
    int index) {
  return backupMasters_.get(index);
}

// optional bool balancerOn = 9;
public static final int BALANCERON_FIELD_NUMBER = 9;
private boolean balancerOn_;
public boolean hasBalancerOn() {
  return ((bitField0_ & 0x00000008) == 0x00000008);
}
public boolean getBalancerOn() {
  return balancerOn_;
}
| |
// Populates proto defaults: message fields get their default instances,
// repeated fields get shared empty lists, bool defaults to false. Called on
// the singleton default instance from the class static initializer.
private void initFields() {
  hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance();
  liveServers_ = java.util.Collections.emptyList();
  deadServers_ = java.util.Collections.emptyList();
  regionsInTransition_ = java.util.Collections.emptyList();
  clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
  masterCoprocessors_ = java.util.Collections.emptyList();
  master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
  backupMasters_ = java.util.Collections.emptyList();
  balancerOn_ = false;
}
// Memoized result: -1 = not computed, 0 = false, 1 = true. Safe to cache
// because the message is immutable once built.
private byte memoizedIsInitialized = -1;
// ClusterStatus has no required fields of its own, so initialization only
// depends on each set/contained sub-message being initialized.
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  if (hasHbaseVersion()) {
    if (!getHbaseVersion().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  for (int i = 0; i < getLiveServersCount(); i++) {
    if (!getLiveServers(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  for (int i = 0; i < getDeadServersCount(); i++) {
    if (!getDeadServers(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  for (int i = 0; i < getRegionsInTransitionCount(); i++) {
    if (!getRegionsInTransition(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasClusterId()) {
    if (!getClusterId().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  for (int i = 0; i < getMasterCoprocessorsCount(); i++) {
    if (!getMasterCoprocessors(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  if (hasMaster()) {
    if (!getMaster().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  for (int i = 0; i < getBackupMastersCount(); i++) {
    if (!getBackupMasters(i).isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
  }
  memoizedIsInitialized = 1;
  return true;
}
| |
// Serializes set fields in field-number order, then any unknown fields.
// The leading getSerializedSize() call primes memoizedSerializedSize for
// the nested writeMessage() length prefixes.
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(1, hbaseVersion_);
  }
  for (int i = 0; i < liveServers_.size(); i++) {
    output.writeMessage(2, liveServers_.get(i));
  }
  for (int i = 0; i < deadServers_.size(); i++) {
    output.writeMessage(3, deadServers_.get(i));
  }
  for (int i = 0; i < regionsInTransition_.size(); i++) {
    output.writeMessage(4, regionsInTransition_.get(i));
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeMessage(5, clusterId_);
  }
  for (int i = 0; i < masterCoprocessors_.size(); i++) {
    output.writeMessage(6, masterCoprocessors_.get(i));
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    output.writeMessage(7, master_);
  }
  for (int i = 0; i < backupMasters_.size(); i++) {
    output.writeMessage(8, backupMasters_.get(i));
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    output.writeBool(9, balancerOn_);
  }
  getUnknownFields().writeTo(output);
}

// Cached wire size; -1 means not yet computed (safe: message is immutable).
private int memoizedSerializedSize = -1;
// Computes the serialized byte size, mirroring writeTo field-for-field.
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(1, hbaseVersion_);
  }
  for (int i = 0; i < liveServers_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(2, liveServers_.get(i));
  }
  for (int i = 0; i < deadServers_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(3, deadServers_.get(i));
  }
  for (int i = 0; i < regionsInTransition_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(4, regionsInTransition_.get(i));
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(5, clusterId_);
  }
  for (int i = 0; i < masterCoprocessors_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(6, masterCoprocessors_.get(i));
  }
  if (((bitField0_ & 0x00000004) == 0x00000004)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(7, master_);
  }
  for (int i = 0; i < backupMasters_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(8, backupMasters_.get(i));
  }
  if (((bitField0_ & 0x00000008) == 0x00000008)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBoolSize(9, balancerOn_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
| |
private static final long serialVersionUID = 0L;
// Java-serialization hook; delegates to GeneratedMessage's replacement.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

// Structural equality: for each optional field, presence must match and
// (if present) values must be equal; repeated fields compare as lists;
// unknown fields must also match. Paired with hashCode() below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other = (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) obj;

  boolean result = true;
  result = result && (hasHbaseVersion() == other.hasHbaseVersion());
  if (hasHbaseVersion()) {
    result = result && getHbaseVersion()
        .equals(other.getHbaseVersion());
  }
  result = result && getLiveServersList()
      .equals(other.getLiveServersList());
  result = result && getDeadServersList()
      .equals(other.getDeadServersList());
  result = result && getRegionsInTransitionList()
      .equals(other.getRegionsInTransitionList());
  result = result && (hasClusterId() == other.hasClusterId());
  if (hasClusterId()) {
    result = result && getClusterId()
        .equals(other.getClusterId());
  }
  result = result && getMasterCoprocessorsList()
      .equals(other.getMasterCoprocessorsList());
  result = result && (hasMaster() == other.hasMaster());
  if (hasMaster()) {
    result = result && getMaster()
        .equals(other.getMaster());
  }
  result = result && getBackupMastersList()
      .equals(other.getBackupMastersList());
  result = result && (hasBalancerOn() == other.hasBalancerOn());
  if (hasBalancerOn()) {
    result = result && (getBalancerOn()
        == other.getBalancerOn());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
| |
// Hash consistent with equals(): folds in each present/non-empty field,
// keyed by its field number, plus the descriptor and unknown fields.
@java.lang.Override
public int hashCode() {
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasHbaseVersion()) {
    hash = (37 * hash) + HBASEVERSION_FIELD_NUMBER;
    hash = (53 * hash) + getHbaseVersion().hashCode();
  }
  if (getLiveServersCount() > 0) {
    hash = (37 * hash) + LIVESERVERS_FIELD_NUMBER;
    hash = (53 * hash) + getLiveServersList().hashCode();
  }
  if (getDeadServersCount() > 0) {
    hash = (37 * hash) + DEADSERVERS_FIELD_NUMBER;
    hash = (53 * hash) + getDeadServersList().hashCode();
  }
  if (getRegionsInTransitionCount() > 0) {
    hash = (37 * hash) + REGIONSINTRANSITION_FIELD_NUMBER;
    hash = (53 * hash) + getRegionsInTransitionList().hashCode();
  }
  if (hasClusterId()) {
    hash = (37 * hash) + CLUSTERID_FIELD_NUMBER;
    hash = (53 * hash) + getClusterId().hashCode();
  }
  if (getMasterCoprocessorsCount() > 0) {
    hash = (37 * hash) + MASTERCOPROCESSORS_FIELD_NUMBER;
    hash = (53 * hash) + getMasterCoprocessorsList().hashCode();
  }
  if (hasMaster()) {
    hash = (37 * hash) + MASTER_FIELD_NUMBER;
    hash = (53 * hash) + getMaster().hashCode();
  }
  if (getBackupMastersCount() > 0) {
    hash = (37 * hash) + BACKUPMASTERS_FIELD_NUMBER;
    hash = (53 * hash) + getBackupMastersList().hashCode();
  }
  if (hasBalancerOn()) {
    hash = (37 * hash) + BALANCERON_FIELD_NUMBER;
    // hashBoolean is a helper generated elsewhere in this outer class.
    hash = (53 * hash) + hashBoolean(getBalancerOn());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  return hash;
}
| |
// Static parse entry points. Each one builds via buildParsed(), which turns
// missing required fields (in nested messages) into
// InvalidProtocolBufferException rather than an unchecked exception.
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
      .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
      .buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
      .buildParsed();
}
// Delimited variants read a varint length prefix first; they return null
// on clean end-of-stream instead of throwing.
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
      .buildParsed();
}
| |
    /** Returns a new, empty Builder for ClusterStatus. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a Builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a Builder initialized from this message's current contents. */
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      // Parent-aware builder used internally for nested-builder change propagation.
      Builder builder = new Builder(parent);
      return builder;
    }
| public static final class Builder extends |
| com.google.protobuf.GeneratedMessage.Builder<Builder> |
| implements org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatusOrBuilder { |
      /** Descriptor for the ClusterStatus message type (shared with the outer class). */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_descriptor;
      }

      /** Reflection table mapping field descriptors to the generated accessors. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.internal_static_ClusterStatus_fieldAccessorTable;
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Parent-aware constructor; the parent is notified when this builder changes.
      private Builder(BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the nested field builders when the runtime requires it
      // (alwaysUseFieldBuilders is true in some descriptor-based configurations).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getHbaseVersionFieldBuilder();
          getLiveServersFieldBuilder();
          getDeadServersFieldBuilder();
          getRegionsInTransitionFieldBuilder();
          getClusterIdFieldBuilder();
          getMasterCoprocessorsFieldBuilder();
          getMasterFieldBuilder();
          getBackupMastersFieldBuilder();
        }
      }
      /** Internal factory used by ClusterStatus.newBuilder(). */
      private static Builder create() {
        return new Builder();
      }
| |
      /**
       * Resets every field to its default: singular message fields back to their
       * default instances, repeated fields to empty lists, balancerOn to false.
       * For each field, either the plain field or its nested builder is cleared,
       * depending on which representation is currently active; the matching
       * has-bit in bitField0_ is cleared in both cases.
       */
      public Builder clear() {
        super.clear();
        if (hbaseVersionBuilder_ == null) {
          hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance();
        } else {
          hbaseVersionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        if (liveServersBuilder_ == null) {
          liveServers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
        } else {
          liveServersBuilder_.clear();
        }
        if (deadServersBuilder_ == null) {
          deadServers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          deadServersBuilder_.clear();
        }
        if (regionsInTransitionBuilder_ == null) {
          regionsInTransition_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          regionsInTransitionBuilder_.clear();
        }
        if (clusterIdBuilder_ == null) {
          clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
        } else {
          clusterIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        if (masterCoprocessorsBuilder_ == null) {
          masterCoprocessors_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
        } else {
          masterCoprocessorsBuilder_.clear();
        }
        if (masterBuilder_ == null) {
          master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          masterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        if (backupMastersBuilder_ == null) {
          backupMasters_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000080);
        } else {
          backupMastersBuilder_.clear();
        }
        balancerOn_ = false;
        bitField0_ = (bitField0_ & ~0x00000100);
        return this;
      }
| |
      /** Deep-copies this builder by round-tripping through a partial message. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDescriptor();
      }

      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance();
      }

      /**
       * Builds the message, throwing UninitializedMessageException (unchecked)
       * if any required field — including those of nested messages — is unset.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus build() {
        org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /**
       * Like build(), but used on the parse path: missing required fields are
       * reported as a checked InvalidProtocolBufferException instead.
       */
      private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildParsed()
          throws com.google.protobuf.InvalidProtocolBufferException {
        org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(
            result).asInvalidProtocolBufferException();
        }
        return result;
      }
| |
      /**
       * Builds the message without checking required fields. Copies each field
       * from either the plain field or its nested builder (whichever is active).
       * Repeated fields held directly are wrapped unmodifiable and their
       * mutability bit cleared, so the list can be shared with the built message
       * until the builder next mutates it. Builder has-bits (bitField0_) are
       * remapped to the message's own, more compact bit layout (to_bitField0_).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus result = new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (hbaseVersionBuilder_ == null) {
          result.hbaseVersion_ = hbaseVersion_;
        } else {
          result.hbaseVersion_ = hbaseVersionBuilder_.build();
        }
        if (liveServersBuilder_ == null) {
          if (((bitField0_ & 0x00000002) == 0x00000002)) {
            liveServers_ = java.util.Collections.unmodifiableList(liveServers_);
            bitField0_ = (bitField0_ & ~0x00000002);
          }
          result.liveServers_ = liveServers_;
        } else {
          result.liveServers_ = liveServersBuilder_.build();
        }
        if (deadServersBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            deadServers_ = java.util.Collections.unmodifiableList(deadServers_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.deadServers_ = deadServers_;
        } else {
          result.deadServers_ = deadServersBuilder_.build();
        }
        if (regionsInTransitionBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            regionsInTransition_ = java.util.Collections.unmodifiableList(regionsInTransition_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.regionsInTransition_ = regionsInTransition_;
        } else {
          result.regionsInTransition_ = regionsInTransitionBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
          to_bitField0_ |= 0x00000002;
        }
        if (clusterIdBuilder_ == null) {
          result.clusterId_ = clusterId_;
        } else {
          result.clusterId_ = clusterIdBuilder_.build();
        }
        if (masterCoprocessorsBuilder_ == null) {
          if (((bitField0_ & 0x00000020) == 0x00000020)) {
            masterCoprocessors_ = java.util.Collections.unmodifiableList(masterCoprocessors_);
            bitField0_ = (bitField0_ & ~0x00000020);
          }
          result.masterCoprocessors_ = masterCoprocessors_;
        } else {
          result.masterCoprocessors_ = masterCoprocessorsBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000040) == 0x00000040)) {
          to_bitField0_ |= 0x00000004;
        }
        if (masterBuilder_ == null) {
          result.master_ = master_;
        } else {
          result.master_ = masterBuilder_.build();
        }
        if (backupMastersBuilder_ == null) {
          if (((bitField0_ & 0x00000080) == 0x00000080)) {
            backupMasters_ = java.util.Collections.unmodifiableList(backupMasters_);
            bitField0_ = (bitField0_ & ~0x00000080);
          }
          result.backupMasters_ = backupMasters_;
        } else {
          result.backupMasters_ = backupMastersBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000100) == 0x00000100)) {
          to_bitField0_ |= 0x00000008;
        }
        result.balancerOn_ = balancerOn_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
| |
      /**
       * Generic merge entry point: dispatches to the typed overload for
       * ClusterStatus messages, otherwise falls back to the reflective
       * descriptor-based merge in the superclass.
       */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }
| |
      /**
       * Field-by-field merge from {@code other} (no-op for the default instance).
       * Singular message fields are merged recursively via their merge* setters;
       * repeated fields are appended. For each repeated field there are two
       * paths: when no nested builder exists, the lists are combined directly
       * (adopting other's list wholesale if ours is empty — safe because built
       * messages expose immutable lists); when a builder exists and it is empty,
       * the builder is disposed and other's list adopted, re-creating the
       * builder only if alwaysUseFieldBuilders demands it, otherwise the
       * messages are appended to the existing builder.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.getDefaultInstance()) return this;
        if (other.hasHbaseVersion()) {
          mergeHbaseVersion(other.getHbaseVersion());
        }
        if (liveServersBuilder_ == null) {
          if (!other.liveServers_.isEmpty()) {
            if (liveServers_.isEmpty()) {
              liveServers_ = other.liveServers_;
              bitField0_ = (bitField0_ & ~0x00000002);
            } else {
              ensureLiveServersIsMutable();
              liveServers_.addAll(other.liveServers_);
            }
            onChanged();
          }
        } else {
          if (!other.liveServers_.isEmpty()) {
            if (liveServersBuilder_.isEmpty()) {
              liveServersBuilder_.dispose();
              liveServersBuilder_ = null;
              liveServers_ = other.liveServers_;
              bitField0_ = (bitField0_ & ~0x00000002);
              liveServersBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getLiveServersFieldBuilder() : null;
            } else {
              liveServersBuilder_.addAllMessages(other.liveServers_);
            }
          }
        }
        if (deadServersBuilder_ == null) {
          if (!other.deadServers_.isEmpty()) {
            if (deadServers_.isEmpty()) {
              deadServers_ = other.deadServers_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureDeadServersIsMutable();
              deadServers_.addAll(other.deadServers_);
            }
            onChanged();
          }
        } else {
          if (!other.deadServers_.isEmpty()) {
            if (deadServersBuilder_.isEmpty()) {
              deadServersBuilder_.dispose();
              deadServersBuilder_ = null;
              deadServers_ = other.deadServers_;
              bitField0_ = (bitField0_ & ~0x00000004);
              deadServersBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getDeadServersFieldBuilder() : null;
            } else {
              deadServersBuilder_.addAllMessages(other.deadServers_);
            }
          }
        }
        if (regionsInTransitionBuilder_ == null) {
          if (!other.regionsInTransition_.isEmpty()) {
            if (regionsInTransition_.isEmpty()) {
              regionsInTransition_ = other.regionsInTransition_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureRegionsInTransitionIsMutable();
              regionsInTransition_.addAll(other.regionsInTransition_);
            }
            onChanged();
          }
        } else {
          if (!other.regionsInTransition_.isEmpty()) {
            if (regionsInTransitionBuilder_.isEmpty()) {
              regionsInTransitionBuilder_.dispose();
              regionsInTransitionBuilder_ = null;
              regionsInTransition_ = other.regionsInTransition_;
              bitField0_ = (bitField0_ & ~0x00000008);
              regionsInTransitionBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getRegionsInTransitionFieldBuilder() : null;
            } else {
              regionsInTransitionBuilder_.addAllMessages(other.regionsInTransition_);
            }
          }
        }
        if (other.hasClusterId()) {
          mergeClusterId(other.getClusterId());
        }
        if (masterCoprocessorsBuilder_ == null) {
          if (!other.masterCoprocessors_.isEmpty()) {
            if (masterCoprocessors_.isEmpty()) {
              masterCoprocessors_ = other.masterCoprocessors_;
              bitField0_ = (bitField0_ & ~0x00000020);
            } else {
              ensureMasterCoprocessorsIsMutable();
              masterCoprocessors_.addAll(other.masterCoprocessors_);
            }
            onChanged();
          }
        } else {
          if (!other.masterCoprocessors_.isEmpty()) {
            if (masterCoprocessorsBuilder_.isEmpty()) {
              masterCoprocessorsBuilder_.dispose();
              masterCoprocessorsBuilder_ = null;
              masterCoprocessors_ = other.masterCoprocessors_;
              bitField0_ = (bitField0_ & ~0x00000020);
              masterCoprocessorsBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getMasterCoprocessorsFieldBuilder() : null;
            } else {
              masterCoprocessorsBuilder_.addAllMessages(other.masterCoprocessors_);
            }
          }
        }
        if (other.hasMaster()) {
          mergeMaster(other.getMaster());
        }
        if (backupMastersBuilder_ == null) {
          if (!other.backupMasters_.isEmpty()) {
            if (backupMasters_.isEmpty()) {
              backupMasters_ = other.backupMasters_;
              bitField0_ = (bitField0_ & ~0x00000080);
            } else {
              ensureBackupMastersIsMutable();
              backupMasters_.addAll(other.backupMasters_);
            }
            onChanged();
          }
        } else {
          if (!other.backupMasters_.isEmpty()) {
            if (backupMastersBuilder_.isEmpty()) {
              backupMastersBuilder_.dispose();
              backupMastersBuilder_ = null;
              backupMasters_ = other.backupMasters_;
              bitField0_ = (bitField0_ & ~0x00000080);
              backupMastersBuilder_ = 
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getBackupMastersFieldBuilder() : null;
            } else {
              backupMastersBuilder_.addAllMessages(other.backupMasters_);
            }
          }
        }
        if (other.hasBalancerOn()) {
          setBalancerOn(other.getBalancerOn());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
| |
      /**
       * Returns true only if every present optional message field and every
       * element of each repeated message field is itself fully initialized
       * (i.e. all of its required fields are set). ClusterStatus has no required
       * fields of its own, so only nested initialization is checked.
       */
      public final boolean isInitialized() {
        if (hasHbaseVersion()) {
          if (!getHbaseVersion().isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getLiveServersCount(); i++) {
          if (!getLiveServers(i).isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getDeadServersCount(); i++) {
          if (!getDeadServers(i).isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getRegionsInTransitionCount(); i++) {
          if (!getRegionsInTransition(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasClusterId()) {
          if (!getClusterId().isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getMasterCoprocessorsCount(); i++) {
          if (!getMasterCoprocessors(i).isInitialized()) {
            
            return false;
          }
        }
        if (hasMaster()) {
          if (!getMaster().isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getBackupMastersCount(); i++) {
          if (!getBackupMasters(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
| |
      /**
       * Wire-format parse loop. Reads tags until end of input (tag 0) or the
       * end of a group. Field numbers map to cases by (fieldNumber << 3 | wire
       * type): 10, 42, 58 are the singular message fields hbaseVersion,
       * clusterId and master (merged into any existing value); 18, 26, 34, 50,
       * 66 append to the repeated fields; 72 is the bool balancerOn.
       * Unrecognized tags are preserved in the unknown-field set.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of input: commit accumulated unknown fields and return.
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                // parseUnknownField returns false on an end-group tag.
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder();
              if (hasHbaseVersion()) {
                subBuilder.mergeFrom(getHbaseVersion());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setHbaseVersion(subBuilder.buildPartial());
              break;
            }
            case 18: {
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addLiveServers(subBuilder.buildPartial());
              break;
            }
            case 26: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addDeadServers(subBuilder.buildPartial());
              break;
            }
            case 34: {
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addRegionsInTransition(subBuilder.buildPartial());
              break;
            }
            case 42: {
              org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder();
              if (hasClusterId()) {
                subBuilder.mergeFrom(getClusterId());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setClusterId(subBuilder.buildPartial());
              break;
            }
            case 50: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addMasterCoprocessors(subBuilder.buildPartial());
              break;
            }
            case 58: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
              if (hasMaster()) {
                subBuilder.mergeFrom(getMaster());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setMaster(subBuilder.buildPartial());
              break;
            }
            case 66: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder();
              input.readMessage(subBuilder, extensionRegistry);
              addBackupMasters(subBuilder.buildPartial());
              break;
            }
            case 72: {
              bitField0_ |= 0x00000100;
              balancerOn_ = input.readBool();
              break;
            }
          }
        }
      }
| |
      // Bit set tracking which fields have been explicitly set / which repeated
      // lists are currently mutable (see each field's mask in the accessors).
      private int bitField0_;

      // optional .HBaseVersionFileContent hbaseVersion = 1;
      // Singular message field: exactly one of hbaseVersion_ (plain value) or
      // hbaseVersionBuilder_ (nested builder) is the live representation.
      private org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> hbaseVersionBuilder_;
      public boolean hasHbaseVersion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent getHbaseVersion() {
        if (hbaseVersionBuilder_ == null) {
          return hbaseVersion_;
        } else {
          return hbaseVersionBuilder_.getMessage();
        }
      }
      /** Sets hbaseVersion; rejects null. */
      public Builder setHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) {
        if (hbaseVersionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          hbaseVersion_ = value;
          onChanged();
        } else {
          hbaseVersionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /** Sets hbaseVersion from a builder (built immediately). */
      public Builder setHbaseVersion(
          org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder builderForValue) {
        if (hbaseVersionBuilder_ == null) {
          hbaseVersion_ = builderForValue.build();
          onChanged();
        } else {
          hbaseVersionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * Merges {@code value} into the existing hbaseVersion when one is already
       * set (and is not the default instance); otherwise replaces it outright.
       */
      public Builder mergeHbaseVersion(org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent value) {
        if (hbaseVersionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              hbaseVersion_ != org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance()) {
            hbaseVersion_ =
              org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.newBuilder(hbaseVersion_).mergeFrom(value).buildPartial();
          } else {
            hbaseVersion_ = value;
          }
          onChanged();
        } else {
          hbaseVersionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /** Clears hbaseVersion back to its default instance. */
      public Builder clearHbaseVersion() {
        if (hbaseVersionBuilder_ == null) {
          hbaseVersion_ = org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.getDefaultInstance();
          onChanged();
        } else {
          hbaseVersionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /** Returns a mutable nested builder for hbaseVersion, marking the field set. */
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder getHbaseVersionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getHbaseVersionFieldBuilder().getBuilder();
      }
      public org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder getHbaseVersionOrBuilder() {
        if (hbaseVersionBuilder_ != null) {
          return hbaseVersionBuilder_.getMessageOrBuilder();
        } else {
          return hbaseVersion_;
        }
      }
      // Lazily switches the field to builder representation; the plain field is
      // handed off to the SingleFieldBuilder and nulled out.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder> 
          getHbaseVersionFieldBuilder() {
        if (hbaseVersionBuilder_ == null) {
          hbaseVersionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContent.Builder, org.apache.hadoop.hbase.protobuf.generated.FSProtos.HBaseVersionFileContentOrBuilder>(
                  hbaseVersion_,
                  getParentForChildren(),
                  isClean());
          hbaseVersion_ = null;
        }
        return hbaseVersionBuilder_;
      }
| |
      // repeated .LiveServerInfo liveServers = 2;
      // Repeated message field: held either as a plain list (liveServers_) or,
      // once any builder-based accessor is used, inside liveServersBuilder_.
      // Bit 0x00000002 of bitField0_ marks the plain list as privately mutable.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> liveServers_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replaces a shared/immutable list with a private ArrayList.
      private void ensureLiveServersIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          liveServers_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo>(liveServers_);
          bitField0_ |= 0x00000002;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> liveServersBuilder_;

      /** Returns an unmodifiable view of the liveServers list. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> getLiveServersList() {
        if (liveServersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(liveServers_);
        } else {
          return liveServersBuilder_.getMessageList();
        }
      }
      public int getLiveServersCount() {
        if (liveServersBuilder_ == null) {
          return liveServers_.size();
        } else {
          return liveServersBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo getLiveServers(int index) {
        if (liveServersBuilder_ == null) {
          return liveServers_.get(index);
        } else {
          return liveServersBuilder_.getMessage(index);
        }
      }
      /** Replaces the element at {@code index}; rejects null. */
      public Builder setLiveServers(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) {
        if (liveServersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLiveServersIsMutable();
          liveServers_.set(index, value);
          onChanged();
        } else {
          liveServersBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setLiveServers(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) {
        if (liveServersBuilder_ == null) {
          ensureLiveServersIsMutable();
          liveServers_.set(index, builderForValue.build());
          onChanged();
        } else {
          liveServersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /** Appends one element; rejects null. */
      public Builder addLiveServers(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) {
        if (liveServersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLiveServersIsMutable();
          liveServers_.add(value);
          onChanged();
        } else {
          liveServersBuilder_.addMessage(value);
        }
        return this;
      }
      /** Inserts one element at {@code index}; rejects null. */
      public Builder addLiveServers(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo value) {
        if (liveServersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureLiveServersIsMutable();
          liveServers_.add(index, value);
          onChanged();
        } else {
          liveServersBuilder_.addMessage(index, value);
        }
        return this;
      }
      public Builder addLiveServers(
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) {
        if (liveServersBuilder_ == null) {
          ensureLiveServersIsMutable();
          liveServers_.add(builderForValue.build());
          onChanged();
        } else {
          liveServersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      public Builder addLiveServers(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder builderForValue) {
        if (liveServersBuilder_ == null) {
          ensureLiveServersIsMutable();
          liveServers_.add(index, builderForValue.build());
          onChanged();
        } else {
          liveServersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /** Appends every element of {@code values}. */
      public Builder addAllLiveServers(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo> values) {
        if (liveServersBuilder_ == null) {
          ensureLiveServersIsMutable();
          super.addAll(values, liveServers_);
          onChanged();
        } else {
          liveServersBuilder_.addAllMessages(values);
        }
        return this;
      }
      /** Removes all liveServers elements. */
      public Builder clearLiveServers() {
        if (liveServersBuilder_ == null) {
          liveServers_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000002);
          onChanged();
        } else {
          liveServersBuilder_.clear();
        }
        return this;
      }
      public Builder removeLiveServers(int index) {
        if (liveServersBuilder_ == null) {
          ensureLiveServersIsMutable();
          liveServers_.remove(index);
          onChanged();
        } else {
          liveServersBuilder_.remove(index);
        }
        return this;
      }
      /** Returns a mutable nested builder for the element at {@code index}. */
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder getLiveServersBuilder(
          int index) {
        return getLiveServersFieldBuilder().getBuilder(index);
      }
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder getLiveServersOrBuilder(
          int index) {
        if (liveServersBuilder_ == null) {
          return liveServers_.get(index);  } else {
          return liveServersBuilder_.getMessageOrBuilder(index);
        }
      }
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> 
           getLiveServersOrBuilderList() {
        if (liveServersBuilder_ != null) {
          return liveServersBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(liveServers_);
        }
      }
      /** Appends a new default-initialized element and returns its builder. */
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder() {
        return getLiveServersFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance());
      }
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder addLiveServersBuilder(
          int index) {
        return getLiveServersFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.getDefaultInstance());
      }
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder> 
           getLiveServersBuilderList() {
        return getLiveServersFieldBuilder().getBuilderList();
      }
      // Lazily switches the field to builder representation; the plain list is
      // handed off to the RepeatedFieldBuilder and nulled out.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder> 
          getLiveServersFieldBuilder() {
        if (liveServersBuilder_ == null) {
          liveServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfoOrBuilder>(
                  liveServers_,
                  ((bitField0_ & 0x00000002) == 0x00000002),
                  getParentForChildren(),
                  isClean());
          liveServers_ = null;
        }
        return liveServersBuilder_;
      }
| |
      // repeated .ServerName deadServers = 3;
      // Same dual representation as liveServers: plain list (deadServers_) or
      // RepeatedFieldBuilder (deadServersBuilder_); mutability bit 0x00000004.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> deadServers_ =
        java.util.Collections.emptyList();
      // Copy-on-write: replaces a shared/immutable list with a private ArrayList.
      private void ensureDeadServersIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          deadServers_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(deadServers_);
          bitField0_ |= 0x00000004;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> deadServersBuilder_;

      /** Returns an unmodifiable view of the deadServers list. */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getDeadServersList() {
        if (deadServersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(deadServers_);
        } else {
          return deadServersBuilder_.getMessageList();
        }
      }
      public int getDeadServersCount() {
        if (deadServersBuilder_ == null) {
          return deadServers_.size();
        } else {
          return deadServersBuilder_.getCount();
        }
      }
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDeadServers(int index) {
        if (deadServersBuilder_ == null) {
          return deadServers_.get(index);
        } else {
          return deadServersBuilder_.getMessage(index);
        }
      }
      /** Replaces the element at {@code index}; rejects null. */
      public Builder setDeadServers(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (deadServersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDeadServersIsMutable();
          deadServers_.set(index, value);
          onChanged();
        } else {
          deadServersBuilder_.setMessage(index, value);
        }
        return this;
      }
      public Builder setDeadServers(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (deadServersBuilder_ == null) {
          ensureDeadServersIsMutable();
          deadServers_.set(index, builderForValue.build());
          onChanged();
        } else {
          deadServersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /** Appends one element; rejects null. */
      public Builder addDeadServers(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (deadServersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDeadServersIsMutable();
          deadServers_.add(value);
          onChanged();
        } else {
          deadServersBuilder_.addMessage(value);
        }
        return this;
      }
| public Builder addDeadServers( |
| int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { |
| if (deadServersBuilder_ == null) { |
| if (value == null) { |
| throw new NullPointerException(); |
| } |
| ensureDeadServersIsMutable(); |
| deadServers_.add(index, value); |
| onChanged(); |
| } else { |
| deadServersBuilder_.addMessage(index, value); |
| } |
| return this; |
| } |
| public Builder addDeadServers( |
| org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { |
| if (deadServersBuilder_ == null) { |
| ensureDeadServersIsMutable(); |
| deadServers_.add(builderForValue.build()); |
| onChanged(); |
| } else { |
| deadServersBuilder_.addMessage(builderForValue.build()); |
| } |
| return this; |
| } |
| public Builder addDeadServers( |
| int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { |
| if (deadServersBuilder_ == null) { |
| ensureDeadServersIsMutable(); |
| deadServers_.add(index, builderForValue.build()); |
| onChanged(); |
| } else { |
| deadServersBuilder_.addMessage(index, builderForValue.build()); |
| } |
| return this; |
| } |
| public Builder addAllDeadServers( |
| java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) { |
| if (deadServersBuilder_ == null) { |
| ensureDeadServersIsMutable(); |
| super.addAll(values, deadServers_); |
| onChanged(); |
| } else { |
| deadServersBuilder_.addAllMessages(values); |
| } |
| return this; |
| } |
| public Builder clearDeadServers() { |
| if (deadServersBuilder_ == null) { |
| deadServers_ = java.util.Collections.emptyList(); |
| bitField0_ = (bitField0_ & ~0x00000004); |
| onChanged(); |
| } else { |
| deadServersBuilder_.clear(); |
| } |
| return this; |
| } |
| public Builder removeDeadServers(int index) { |
| if (deadServersBuilder_ == null) { |
| ensureDeadServersIsMutable(); |
| deadServers_.remove(index); |
| onChanged(); |
| } else { |
| deadServersBuilder_.remove(index); |
| } |
| return this; |
| } |
| public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDeadServersBuilder( |
| int index) { |
| return getDeadServersFieldBuilder().getBuilder(index); |
| } |
| public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDeadServersOrBuilder( |
| int index) { |
| if (deadServersBuilder_ == null) { |
| return deadServers_.get(index); } else { |
| return deadServersBuilder_.getMessageOrBuilder(index); |
| } |
| } |
| public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> |
| getDeadServersOrBuilderList() { |
| if (deadServersBuilder_ != null) { |
| return deadServersBuilder_.getMessageOrBuilderList(); |
| } else { |
| return java.util.Collections.unmodifiableList(deadServers_); |
| } |
| } |
| public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder() { |
| return getDeadServersFieldBuilder().addBuilder( |
| org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); |
| } |
| public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addDeadServersBuilder( |
| int index) { |
| return getDeadServersFieldBuilder().addBuilder( |
| index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); |
| } |
| public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> |
| getDeadServersBuilderList() { |
| return getDeadServersFieldBuilder().getBuilderList(); |
| } |
| private com.google.protobuf.RepeatedFieldBuilder< |
| org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> |
| getDeadServersFieldBuilder() { |
| if (deadServersBuilder_ == null) { |
| deadServersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< |
| org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( |
| deadServers_, |
| ((bitField0_ & 0x00000004) == 0x00000004), |
| getParentForChildren(), |
| isClean()); |
| deadServers_ = null; |
| } |
| return deadServersBuilder_; |
| } |
| |
      // repeated .RegionInTransition regionsInTransition = 4;
      // Backing list for the regionsInTransition field; copied on first
      // mutation (copy-on-write, tracked by bit 0x00000008).
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> regionsInTransition_ =
        java.util.Collections.emptyList();
      // Replaces the shared list with a private mutable copy exactly once.
      private void ensureRegionsInTransitionIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          regionsInTransition_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition>(regionsInTransition_);
          bitField0_ |= 0x00000008;
        }
      }

      // Once non-null, this builder owns the elements and regionsInTransition_
      // is null; accessors branch on which representation is live.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> regionsInTransitionBuilder_;

      // Returns an unmodifiable view of the current elements.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> getRegionsInTransitionList() {
        if (regionsInTransitionBuilder_ == null) {
          return java.util.Collections.unmodifiableList(regionsInTransition_);
        } else {
          return regionsInTransitionBuilder_.getMessageList();
        }
      }
      // Returns the number of elements.
      public int getRegionsInTransitionCount() {
        if (regionsInTransitionBuilder_ == null) {
          return regionsInTransition_.size();
        } else {
          return regionsInTransitionBuilder_.getCount();
        }
      }
      // Returns the element at the given index.
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition getRegionsInTransition(int index) {
        if (regionsInTransitionBuilder_ == null) {
          return regionsInTransition_.get(index);
        } else {
          return regionsInTransitionBuilder_.getMessage(index);
        }
      }
      // Replaces the element at the given index; rejects null.
      public Builder setRegionsInTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) {
        if (regionsInTransitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.set(index, value);
          onChanged();
        } else {
          regionsInTransitionBuilder_.setMessage(index, value);
        }
        return this;
      }
      // Replaces the element at the given index with the built message.
      public Builder setRegionsInTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) {
        if (regionsInTransitionBuilder_ == null) {
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.set(index, builderForValue.build());
          onChanged();
        } else {
          regionsInTransitionBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      // Appends an element; rejects null.
      public Builder addRegionsInTransition(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) {
        if (regionsInTransitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.add(value);
          onChanged();
        } else {
          regionsInTransitionBuilder_.addMessage(value);
        }
        return this;
      }
      // Inserts an element at the given index; rejects null.
      public Builder addRegionsInTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition value) {
        if (regionsInTransitionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.add(index, value);
          onChanged();
        } else {
          regionsInTransitionBuilder_.addMessage(index, value);
        }
        return this;
      }
      // Appends the built message.
      public Builder addRegionsInTransition(
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) {
        if (regionsInTransitionBuilder_ == null) {
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.add(builderForValue.build());
          onChanged();
        } else {
          regionsInTransitionBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      // Inserts the built message at the given index.
      public Builder addRegionsInTransition(
          int index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder builderForValue) {
        if (regionsInTransitionBuilder_ == null) {
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.add(index, builderForValue.build());
          onChanged();
        } else {
          regionsInTransitionBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      // Appends every element of the iterable (delegates to GeneratedMessage.Builder.addAll).
      public Builder addAllRegionsInTransition(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition> values) {
        if (regionsInTransitionBuilder_ == null) {
          ensureRegionsInTransitionIsMutable();
          super.addAll(values, regionsInTransition_);
          onChanged();
        } else {
          regionsInTransitionBuilder_.addAllMessages(values);
        }
        return this;
      }
      // Removes all elements and clears the has-mutable-copy bit.
      public Builder clearRegionsInTransition() {
        if (regionsInTransitionBuilder_ == null) {
          regionsInTransition_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          regionsInTransitionBuilder_.clear();
        }
        return this;
      }
      // Removes the element at the given index.
      public Builder removeRegionsInTransition(int index) {
        if (regionsInTransitionBuilder_ == null) {
          ensureRegionsInTransitionIsMutable();
          regionsInTransition_.remove(index);
          onChanged();
        } else {
          regionsInTransitionBuilder_.remove(index);
        }
        return this;
      }
      // Returns a mutable sub-Builder for the element at the given index;
      // forces creation of the field builder.
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder getRegionsInTransitionBuilder(
          int index) {
        return getRegionsInTransitionFieldBuilder().getBuilder(index);
      }
      // Read-only view of the element at the given index (no builder forced).
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder getRegionsInTransitionOrBuilder(
          int index) {
        if (regionsInTransitionBuilder_ == null) {
          return regionsInTransition_.get(index); } else {
          return regionsInTransitionBuilder_.getMessageOrBuilder(index);
        }
      }
      // Read-only view of all elements (no builder forced).
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> 
           getRegionsInTransitionOrBuilderList() {
        if (regionsInTransitionBuilder_ != null) {
          return regionsInTransitionBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(regionsInTransition_);
        }
      }
      // Appends a default-valued element and returns its sub-Builder.
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder() {
        return getRegionsInTransitionFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance());
      }
      // Inserts a default-valued element at the given index and returns its sub-Builder.
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder addRegionsInTransitionBuilder(
          int index) {
        return getRegionsInTransitionFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.getDefaultInstance());
      }
      // Returns one sub-Builder per element, forcing all into builder form.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder> 
           getRegionsInTransitionBuilderList() {
        return getRegionsInTransitionFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder for regionsInTransition; after
      // creation the plain list is nulled out and the builder owns the data.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder> 
          getRegionsInTransitionFieldBuilder() {
        if (regionsInTransitionBuilder_ == null) {
          regionsInTransitionBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransitionOrBuilder>(
                  regionsInTransition_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          regionsInTransition_ = null;
        }
        return regionsInTransitionBuilder_;
      }
| |
      // optional .ClusterId clusterId = 5;
      // Backing value for the singular clusterId field; presence is tracked
      // by bit 0x00000010 of bitField0_, not by this reference.
      private org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
      // Once non-null, this builder owns the value and clusterId_ is null;
      // accessors branch on which representation is live.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> clusterIdBuilder_;
      // True if clusterId has been explicitly set.
      public boolean hasClusterId() {
        return ((bitField0_ & 0x00000010) == 0x00000010);
      }
      // Returns the current value (default instance when unset).
      public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId getClusterId() {
        if (clusterIdBuilder_ == null) {
          return clusterId_;
        } else {
          return clusterIdBuilder_.getMessage();
        }
      }
      // Sets the field and marks it present; rejects null.
      public Builder setClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) {
        if (clusterIdBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          clusterId_ = value;
          onChanged();
        } else {
          clusterIdBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      // Sets the field from a builder and marks it present.
      public Builder setClusterId(
          org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder builderForValue) {
        if (clusterIdBuilder_ == null) {
          clusterId_ = builderForValue.build();
          onChanged();
        } else {
          clusterIdBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      // Proto merge semantics: if already set to a non-default value, merge
      // field-by-field; otherwise adopt the incoming value wholesale.
      public Builder mergeClusterId(org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId value) {
        if (clusterIdBuilder_ == null) {
          if (((bitField0_ & 0x00000010) == 0x00000010) &&
              clusterId_ != org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance()) {
            clusterId_ =
              org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.newBuilder(clusterId_).mergeFrom(value).buildPartial();
          } else {
            clusterId_ = value;
          }
          onChanged();
        } else {
          clusterIdBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000010;
        return this;
      }
      // Resets the field to its default and clears the presence bit.
      public Builder clearClusterId() {
        if (clusterIdBuilder_ == null) {
          clusterId_ = org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.getDefaultInstance();
          onChanged();
        } else {
          clusterIdBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000010);
        return this;
      }
      // Returns a mutable sub-Builder for the field, marking it present and
      // forcing creation of the field builder.
      public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder getClusterIdBuilder() {
        bitField0_ |= 0x00000010;
        onChanged();
        return getClusterIdFieldBuilder().getBuilder();
      }
      // Read-only view of the field (no builder forced).
      public org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder getClusterIdOrBuilder() {
        if (clusterIdBuilder_ != null) {
          return clusterIdBuilder_.getMessageOrBuilder();
        } else {
          return clusterId_;
        }
      }
      // Lazily creates the SingleFieldBuilder for clusterId; after creation
      // the plain reference is nulled out and the builder owns the value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder> 
          getClusterIdFieldBuilder() {
        if (clusterIdBuilder_ == null) {
          clusterIdBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterId.Builder, org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.ClusterIdOrBuilder>(
                  clusterId_,
                  getParentForChildren(),
                  isClean());
          clusterId_ = null;
        }
        return clusterIdBuilder_;
      }
| |
      // repeated .Coprocessor masterCoprocessors = 6;
      // Backing list for the masterCoprocessors field; copied on first
      // mutation (copy-on-write, tracked by bit 0x00000020).
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> masterCoprocessors_ =
        java.util.Collections.emptyList();
      // Replaces the shared list with a private mutable copy exactly once.
      private void ensureMasterCoprocessorsIsMutable() {
        if (!((bitField0_ & 0x00000020) == 0x00000020)) {
          masterCoprocessors_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor>(masterCoprocessors_);
          bitField0_ |= 0x00000020;
        }
      }

      // Once non-null, this builder owns the elements and masterCoprocessors_
      // is null; accessors branch on which representation is live.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> masterCoprocessorsBuilder_;

      // Returns an unmodifiable view of the current elements.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> getMasterCoprocessorsList() {
        if (masterCoprocessorsBuilder_ == null) {
          return java.util.Collections.unmodifiableList(masterCoprocessors_);
        } else {
          return masterCoprocessorsBuilder_.getMessageList();
        }
      }
      // Returns the number of elements.
      public int getMasterCoprocessorsCount() {
        if (masterCoprocessorsBuilder_ == null) {
          return masterCoprocessors_.size();
        } else {
          return masterCoprocessorsBuilder_.getCount();
        }
      }
      // Returns the element at the given index.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor getMasterCoprocessors(int index) {
        if (masterCoprocessorsBuilder_ == null) {
          return masterCoprocessors_.get(index);
        } else {
          return masterCoprocessorsBuilder_.getMessage(index);
        }
      }
      // Replaces the element at the given index; rejects null.
      public Builder setMasterCoprocessors(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
        if (masterCoprocessorsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.set(index, value);
          onChanged();
        } else {
          masterCoprocessorsBuilder_.setMessage(index, value);
        }
        return this;
      }
      // Replaces the element at the given index with the built message.
      public Builder setMasterCoprocessors(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
        if (masterCoprocessorsBuilder_ == null) {
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.set(index, builderForValue.build());
          onChanged();
        } else {
          masterCoprocessorsBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      // Appends an element; rejects null.
      public Builder addMasterCoprocessors(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
        if (masterCoprocessorsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.add(value);
          onChanged();
        } else {
          masterCoprocessorsBuilder_.addMessage(value);
        }
        return this;
      }
      // Inserts an element at the given index; rejects null.
      public Builder addMasterCoprocessors(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor value) {
        if (masterCoprocessorsBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.add(index, value);
          onChanged();
        } else {
          masterCoprocessorsBuilder_.addMessage(index, value);
        }
        return this;
      }
      // Appends the built message.
      public Builder addMasterCoprocessors(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
        if (masterCoprocessorsBuilder_ == null) {
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.add(builderForValue.build());
          onChanged();
        } else {
          masterCoprocessorsBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      // Inserts the built message at the given index.
      public Builder addMasterCoprocessors(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder builderForValue) {
        if (masterCoprocessorsBuilder_ == null) {
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.add(index, builderForValue.build());
          onChanged();
        } else {
          masterCoprocessorsBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      // Appends every element of the iterable (delegates to GeneratedMessage.Builder.addAll).
      public Builder addAllMasterCoprocessors(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor> values) {
        if (masterCoprocessorsBuilder_ == null) {
          ensureMasterCoprocessorsIsMutable();
          super.addAll(values, masterCoprocessors_);
          onChanged();
        } else {
          masterCoprocessorsBuilder_.addAllMessages(values);
        }
        return this;
      }
      // Removes all elements and clears the has-mutable-copy bit.
      public Builder clearMasterCoprocessors() {
        if (masterCoprocessorsBuilder_ == null) {
          masterCoprocessors_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000020);
          onChanged();
        } else {
          masterCoprocessorsBuilder_.clear();
        }
        return this;
      }
      // Removes the element at the given index.
      public Builder removeMasterCoprocessors(int index) {
        if (masterCoprocessorsBuilder_ == null) {
          ensureMasterCoprocessorsIsMutable();
          masterCoprocessors_.remove(index);
          onChanged();
        } else {
          masterCoprocessorsBuilder_.remove(index);
        }
        return this;
      }
      // Returns a mutable sub-Builder for the element at the given index;
      // forces creation of the field builder.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder getMasterCoprocessorsBuilder(
          int index) {
        return getMasterCoprocessorsFieldBuilder().getBuilder(index);
      }
      // Read-only view of the element at the given index (no builder forced).
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder getMasterCoprocessorsOrBuilder(
          int index) {
        if (masterCoprocessorsBuilder_ == null) {
          return masterCoprocessors_.get(index); } else {
          return masterCoprocessorsBuilder_.getMessageOrBuilder(index);
        }
      }
      // Read-only view of all elements (no builder forced).
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> 
           getMasterCoprocessorsOrBuilderList() {
        if (masterCoprocessorsBuilder_ != null) {
          return masterCoprocessorsBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(masterCoprocessors_);
        }
      }
      // Appends a default-valued element and returns its sub-Builder.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder() {
        return getMasterCoprocessorsFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance());
      }
      // Inserts a default-valued element at the given index and returns its sub-Builder.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder addMasterCoprocessorsBuilder(
          int index) {
        return getMasterCoprocessorsFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.getDefaultInstance());
      }
      // Returns one sub-Builder per element, forcing all into builder form.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder> 
           getMasterCoprocessorsBuilderList() {
        return getMasterCoprocessorsFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder for masterCoprocessors; after
      // creation the plain list is nulled out and the builder owns the data.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder> 
          getMasterCoprocessorsFieldBuilder() {
        if (masterCoprocessorsBuilder_ == null) {
          masterCoprocessorsBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.Coprocessor.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CoprocessorOrBuilder>(
                  masterCoprocessors_,
                  ((bitField0_ & 0x00000020) == 0x00000020),
                  getParentForChildren(),
                  isClean());
          masterCoprocessors_ = null;
        }
        return masterCoprocessorsBuilder_;
      }
| |
      // optional .ServerName master = 7;
      // Backing value for the singular master field; presence is tracked by
      // bit 0x00000040 of bitField0_, not by this reference.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      // Once non-null, this builder owns the value and master_ is null;
      // accessors branch on which representation is live.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_;
      // True if master has been explicitly set.
      public boolean hasMaster() {
        return ((bitField0_ & 0x00000040) == 0x00000040);
      }
      // Returns the current value (default instance when unset).
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() {
        if (masterBuilder_ == null) {
          return master_;
        } else {
          return masterBuilder_.getMessage();
        }
      }
      // Sets the field and marks it present; rejects null.
      public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (masterBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          master_ = value;
          onChanged();
        } else {
          masterBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      // Sets the field from a builder and marks it present.
      public Builder setMaster(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (masterBuilder_ == null) {
          master_ = builderForValue.build();
          onChanged();
        } else {
          masterBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      // Proto merge semantics: if already set to a non-default value, merge
      // field-by-field; otherwise adopt the incoming value wholesale.
      public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (masterBuilder_ == null) {
          if (((bitField0_ & 0x00000040) == 0x00000040) &&
              master_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
            master_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial();
          } else {
            master_ = value;
          }
          onChanged();
        } else {
          masterBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000040;
        return this;
      }
      // Resets the field to its default and clears the presence bit.
      public Builder clearMaster() {
        if (masterBuilder_ == null) {
          master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
          onChanged();
        } else {
          masterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000040);
        return this;
      }
      // Returns a mutable sub-Builder for the field, marking it present and
      // forcing creation of the field builder.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() {
        bitField0_ |= 0x00000040;
        onChanged();
        return getMasterFieldBuilder().getBuilder();
      }
      // Read-only view of the field (no builder forced).
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() {
        if (masterBuilder_ != null) {
          return masterBuilder_.getMessageOrBuilder();
        } else {
          return master_;
        }
      }
      // Lazily creates the SingleFieldBuilder for master; after creation the
      // plain reference is nulled out and the builder owns the value.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> 
          getMasterFieldBuilder() {
        if (masterBuilder_ == null) {
          masterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  master_,
                  getParentForChildren(),
                  isClean());
          master_ = null;
        }
        return masterBuilder_;
      }
| |
      // repeated .ServerName backupMasters = 8;
      // Backing list for the backupMasters field; copied on first mutation
      // (copy-on-write, tracked by bit 0x00000080).
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> backupMasters_ =
        java.util.Collections.emptyList();
      // Replaces the shared list with a private mutable copy exactly once.
      private void ensureBackupMastersIsMutable() {
        if (!((bitField0_ & 0x00000080) == 0x00000080)) {
          backupMasters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(backupMasters_);
          bitField0_ |= 0x00000080;
        }
      }

      // Once non-null, this builder owns the elements and backupMasters_ is
      // null; accessors branch on which representation is live.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> backupMastersBuilder_;

      // Returns an unmodifiable view of the current elements.
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getBackupMastersList() {
        if (backupMastersBuilder_ == null) {
          return java.util.Collections.unmodifiableList(backupMasters_);
        } else {
          return backupMastersBuilder_.getMessageList();
        }
      }
      // Returns the number of elements.
      public int getBackupMastersCount() {
        if (backupMastersBuilder_ == null) {
          return backupMasters_.size();
        } else {
          return backupMastersBuilder_.getCount();
        }
      }
      // Returns the element at the given index.
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getBackupMasters(int index) {
        if (backupMastersBuilder_ == null) {
          return backupMasters_.get(index);
        } else {
          return backupMastersBuilder_.getMessage(index);
        }
      }
      // Replaces the element at the given index; rejects null.
      public Builder setBackupMasters(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (backupMastersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBackupMastersIsMutable();
          backupMasters_.set(index, value);
          onChanged();
        } else {
          backupMastersBuilder_.setMessage(index, value);
        }
        return this;
      }
      // Replaces the element at the given index with the built message.
      public Builder setBackupMasters(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (backupMastersBuilder_ == null) {
          ensureBackupMastersIsMutable();
          backupMasters_.set(index, builderForValue.build());
          onChanged();
        } else {
          backupMastersBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      // Appends an element; rejects null.
      public Builder addBackupMasters(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (backupMastersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBackupMastersIsMutable();
          backupMasters_.add(value);
          onChanged();
        } else {
          backupMastersBuilder_.addMessage(value);
        }
        return this;
      }
      // Inserts an element at the given index; rejects null.
      public Builder addBackupMasters(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (backupMastersBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureBackupMastersIsMutable();
          backupMasters_.add(index, value);
          onChanged();
        } else {
          backupMastersBuilder_.addMessage(index, value);
        }
        return this;
      }
      // Appends the built message.
      public Builder addBackupMasters(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (backupMastersBuilder_ == null) {
          ensureBackupMastersIsMutable();
          backupMasters_.add(builderForValue.build());
          onChanged();
        } else {
          backupMastersBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      // Inserts the built message at the given index.
      public Builder addBackupMasters(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (backupMastersBuilder_ == null) {
          ensureBackupMastersIsMutable();
          backupMasters_.add(index, builderForValue.build());
          onChanged();
        } else {
          backupMastersBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * Appends every element of {@code values} to the backupMasters field.
       * On the list path this delegates to the protected
       * GeneratedMessage.Builder.addAll, which handles size-hinted and lazy
       * iterables; kept as-is so that behavior stays identical.
       */
      public Builder addAllBackupMasters(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) {
        if (backupMastersBuilder_ == null) {
          ensureBackupMastersIsMutable();
          super.addAll(values, backupMasters_);
          onChanged();
        } else {
          backupMastersBuilder_.addAllMessages(values);
        }
        return this;
      }
| public Builder clearBackupMasters() { |
| if (backupMastersBuilder_ == null) { |
| backupMasters_ = java.util.Collections.emptyList(); |
| bitField0_ = (bitField0_ & ~0x00000080); |
| onChanged(); |
| } else { |
| backupMastersBuilder_.clear(); |
| } |
| return this; |
| } |
| public Builder removeBackupMasters(int index) { |
| if (backupMastersBuilder_ == null) { |
| ensureBackupMastersIsMutable(); |
| backupMasters_.remove(index); |
| onChanged(); |
| } else { |
| backupMastersBuilder_.remove(index); |
| } |
| return this; |
| } |
      /**
       * Returns a mutable builder for the element at {@code index},
       * forcing creation of the nested field builder on first use.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getBackupMastersBuilder(
          int index) {
        return getBackupMastersFieldBuilder().getBuilder(index);
      }
| public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getBackupMastersOrBuilder( |
| int index) { |
| if (backupMastersBuilder_ == null) { |
| return backupMasters_.get(index); } else { |
| return backupMastersBuilder_.getMessageOrBuilder(index); |
| } |
| } |
| public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> |
| getBackupMastersOrBuilderList() { |
| if (backupMastersBuilder_ != null) { |
| return backupMastersBuilder_.getMessageOrBuilderList(); |
| } else { |
| return java.util.Collections.unmodifiableList(backupMasters_); |
| } |
| } |
      /**
       * Appends a new element initialized to the ServerName default instance
       * and returns its builder for in-place population.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder() {
        return getBackupMastersFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
      }
      /**
       * Inserts a new default-initialized element at {@code index} and
       * returns its builder for in-place population.
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addBackupMastersBuilder(
          int index) {
        return getBackupMastersFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance());
      }
      /**
       * Returns a live list of element builders, forcing creation of the
       * nested field builder on first use.
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder>
           getBackupMastersBuilderList() {
        return getBackupMastersFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder on first use.  Ownership of
      // the current list contents transfers to the builder and the plain
      // backupMasters_ list is nulled out, so from then on every accessor
      // must route through backupMastersBuilder_.
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
          getBackupMastersFieldBuilder() {
        if (backupMastersBuilder_ == null) {
          backupMastersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
                  backupMasters_,
                  ((bitField0_ & 0x00000080) == 0x00000080),  // list already mutable?
                  getParentForChildren(),
                  isClean());
          backupMasters_ = null;  // builder now owns the elements
        }
        return backupMastersBuilder_;
      }
| |
| // optional bool balancerOn = 9; |
| private boolean balancerOn_ ; |
| public boolean hasBalancerOn() { |
| return ((bitField0_ & 0x00000100) == 0x00000100); |
| } |
| public boolean getBalancerOn() { |
| return balancerOn_; |
| } |
| public Builder setBalancerOn(boolean value) { |
| bitField0_ |= 0x00000100; |
| balancerOn_ = value; |
| onChanged(); |
| return this; |
| } |
| public Builder clearBalancerOn() { |
| bitField0_ = (bitField0_ & ~0x00000100); |
| balancerOn_ = false; |
| onChanged(); |
| return this; |
| } |
| |
| // @@protoc_insertion_point(builder_scope:ClusterStatus) |
| } |
| |
    static {
      // Build the singleton default instance eagerly at class-load time.
      // The boolean-arg constructor skips normal construction; initFields()
      // then puts every field into its default state.
      defaultInstance = new ClusterStatus(true);
      defaultInstance.initFields();
    }
| |
| // @@protoc_insertion_point(class_scope:ClusterStatus) |
| } |
| |
  // Descriptor handles and reflective field-accessor tables for each message
  // type in ClusterStatus.proto; all are populated exactly once by the
  // static descriptor initializer at the bottom of this class.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionState_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionState_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionInTransition_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionInTransition_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_LiveServerInfo_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_LiveServerInfo_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ClusterStatus_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ClusterStatus_fieldAccessorTable;
| |
  /** Returns the parsed file descriptor for ClusterStatus.proto. */
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  // Assigned exactly once by the static initializer below.
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for ClusterStatus.proto, escaped into
    // string chunks by protoc.  These bytes are the source of truth for the
    // message/field layout and MUST NOT be edited by hand.
    java.lang.String[] descriptorData = {
      "\n\023ClusterStatus.proto\032\013hbase.proto\032\017Clus" +
      "terId.proto\032\010FS.proto\"\346\001\n\013RegionState\022\037\n" +
      "\nregionInfo\030\001 \002(\0132\013.RegionInfo\022!\n\005state\030" +
      "\002 \002(\0162\022.RegionState.State\022\r\n\005stamp\030\003 \001(\004" +
      "\"\203\001\n\005State\022\013\n\007OFFLINE\020\000\022\020\n\014PENDING_OPEN\020" +
      "\001\022\013\n\007OPENING\020\002\022\010\n\004OPEN\020\003\022\021\n\rPENDING_CLOS" +
      "E\020\004\022\013\n\007CLOSING\020\005\022\n\n\006CLOSED\020\006\022\r\n\tSPLITTIN" +
      "G\020\007\022\t\n\005SPLIT\020\010\"W\n\022RegionInTransition\022\036\n\004" +
      "spec\030\001 \002(\0132\020.RegionSpecifier\022!\n\013regionSt" +
      "ate\030\002 \002(\0132\014.RegionState\"N\n\016LiveServerInf",
      "o\022\033\n\006server\030\001 \002(\0132\013.ServerName\022\037\n\nserver" +
      "Load\030\002 \002(\0132\013.ServerLoad\"\327\002\n\rClusterStatu" +
      "s\022.\n\014hbaseVersion\030\001 \001(\0132\030.HBaseVersionFi" +
      "leContent\022$\n\013liveServers\030\002 \003(\0132\017.LiveSer" +
      "verInfo\022 \n\013deadServers\030\003 \003(\0132\013.ServerNam" +
      "e\0220\n\023regionsInTransition\030\004 \003(\0132\023.RegionI" +
      "nTransition\022\035\n\tclusterId\030\005 \001(\0132\n.Cluster" +
      "Id\022(\n\022masterCoprocessors\030\006 \003(\0132\014.Coproce" +
      "ssor\022\033\n\006master\030\007 \001(\0132\013.ServerName\022\"\n\rbac" +
      "kupMasters\030\010 \003(\0132\013.ServerName\022\022\n\nbalance",
      "rOn\030\t \001(\010BF\n*org.apache.hadoop.hbase.pro" +
      "tobuf.generatedB\023ClusterStatusProtosH\001\240\001" +
      "\001"
    };
    // Callback invoked once the descriptor bytes are parsed; captures the
    // FileDescriptor and wires up a reflective FieldAccessorTable for each
    // message type, in the same order the messages are declared above.
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_RegionState_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_RegionState_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RegionState_descriptor,
              new java.lang.String[] { "RegionInfo", "State", "Stamp", },
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.class,
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.Builder.class);
          internal_static_RegionInTransition_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_RegionInTransition_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_RegionInTransition_descriptor,
              new java.lang.String[] { "Spec", "RegionState", },
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.class,
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionInTransition.Builder.class);
          internal_static_LiveServerInfo_descriptor =
            getDescriptor().getMessageTypes().get(2);
          internal_static_LiveServerInfo_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_LiveServerInfo_descriptor,
              new java.lang.String[] { "Server", "ServerLoad", },
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.class,
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.LiveServerInfo.Builder.class);
          internal_static_ClusterStatus_descriptor =
            getDescriptor().getMessageTypes().get(3);
          internal_static_ClusterStatus_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_ClusterStatus_descriptor,
              new java.lang.String[] { "HbaseVersion", "LiveServers", "DeadServers", "RegionsInTransition", "ClusterId", "MasterCoprocessors", "Master", "BackupMasters", "BalancerOn", },
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.class,
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.ClusterStatus.Builder.class);
          return null;  // no extensions to register
        }
      };
    // Parse the descriptor, resolving cross-file references against the
    // dependency descriptors in the same order as the .proto imports.
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.ClusterIdProtos.getDescriptor(),
          org.apache.hadoop.hbase.protobuf.generated.FSProtos.getDescriptor(),
        }, assigner);
  }
| |
| // @@protoc_insertion_point(outer_class_scope) |
| } |